Compare commits
7 Commits
install-st ... cws/pumiFi
| Author | SHA1 | Date |
|---|---|---|
| | 9dead10d70 | |
| | 63ba7db2d2 | |
| | 19c0208c1a | |
| | 9682347254 | |
| | f4f7309504 | |
| | 071a34df27 | |
| | 8d35a8498b | |
.github/pull_request_template.md (vendored, 6 changes)
@@ -1,6 +0,0 @@
-<!--
-Remember that `spackbot` can help with your PR in multiple ways:
-- `@spackbot help` shows all the commands that are currently available
-- `@spackbot fix style` tries to push a commit to fix style issues in this PR
-- `@spackbot re-run pipeline` runs the pipelines again, if you have write access to the repository
--->
.github/workflows/audit.yaml (vendored, 2 changes)
@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
  flags: unittests,audits
.github/workflows/build-containers.yml (vendored, 4 changes)
@@ -57,7 +57,7 @@ jobs:
- name: Checkout
  uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2

-- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
+- uses: docker/metadata-action@dbef88086f6cef02e264edb7dbf63250c17cef6c
  id: docker_meta
  with:
    images: |

@@ -118,5 +118,7 @@ jobs:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
+cache-from: type=gha
+cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
.github/workflows/ci.yaml (vendored, 2 changes)
@@ -40,7 +40,7 @@ jobs:
with:
  fetch-depth: 0
# For pull requests it's not necessary to checkout the code
-- uses: dorny/paths-filter@ebc4d7e9ebcb0b1eb21480bb8f43113e996ac77a
+- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
  id: filter
  with:
    # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
.github/workflows/style/requirements.txt (vendored, 4 changes)
@@ -1,5 +1,5 @@
-black==24.2.0
-clingo==5.7.1
+black==23.12.1
+clingo==5.6.2
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
.github/workflows/unit_tests.yaml (vendored, 8 changes)
@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
  share/spack/qa/run-unit-tests
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
  flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration

@@ -122,7 +122,7 @@ jobs:
COVERAGE: true
run: |
  share/spack/qa/run-shell-tests
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
  flags: shelltests,linux

@@ -181,7 +181,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
  share/spack/qa/run-unit-tests
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
with:
  flags: unittests,linux,clingo
# Run unit tests on MacOS

@@ -216,6 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
  flags: unittests,macos
.github/workflows/windows_python.yml (vendored, 4 changes)
@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
  flags: unittests,windows
unit-tests-cmd:

@@ -57,7 +57,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
-- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
+- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
  flags: unittests,windows
build-abseil:
@@ -1130,10 +1130,6 @@ A version specifier can also be a list of ranges and specific versions,
separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
in the range ``1.0:1.5`` and the specific version ``1.7.1``.

^^^^^^^^^^^^
Git versions
^^^^^^^^^^^^

For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
and commits. Spack will stage and build based off the ``git``
@@ -199,7 +199,6 @@ def setup(sphinx):
    ("py:class", "contextlib.contextmanager"),
    ("py:class", "module"),
    ("py:class", "_io.BufferedReader"),
    ("py:class", "_io.BytesIO"),
    ("py:class", "unittest.case.TestCase"),
    ("py:class", "_frozen_importlib_external.SourceFileLoader"),
    ("py:class", "clingo.Control"),

@@ -216,7 +215,6 @@ def setup(sphinx):
    ("py:class", "spack.spec.InstallStatus"),
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
    ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -357,23 +357,91 @@ If there is a hook that you would like and is missing, you can propose to add a
``pre_install(spec)``
"""""""""""""""""""""

-A ``pre_install`` hook is run within the install subprocess, directly before the install starts.
-It expects a single argument of a spec.
+A ``pre_install`` hook is run within an install subprocess, directly before
+the install starts. It expects a single argument of a spec, and is run in
+a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks
+as we have defined them here, but rather callback functions associated with
+a package install.

-"""""""""""""""""""""""""""""""""""""
-``post_install(spec, explicit=None)``
-"""""""""""""""""""""""""""""""""""""
+""""""""""""""""""""""
+``post_install(spec)``
+""""""""""""""""""""""

-A ``post_install`` hook is run within the install subprocess, directly after the install finishes,
-but before the build stage is removed and the spec is registered in the database. It expects two
-arguments: spec and an optional boolean indicating whether this spec is being installed explicitly.
+A ``post_install`` hook is run within an install subprocess, directly after
+the install finishes, but before the build stage is removed. If you
+write one of these hooks, you should expect it to accept a spec as the only
+argument. This is run in a multiprocessing subprocess. This ``post_install`` is
+also seen in packages, but in this context not related to the hooks described
+here.
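A hook of either kind is just a module-level function. A minimal sketch, assuming the
single-``spec`` signature shown above (the body is illustrative, not Spack code):

.. code-block:: python

   def post_install(spec):
       # Illustrative only: record what was installed.
       print("installed {0}".format(spec.name))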
""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
``pre_uninstall(spec)`` and ``post_uninstall(spec)``
|
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
These hooks are currently used for cleaning up module files after uninstall.
|
||||
""""""""""""""""""""""""""
|
||||
``on_install_start(spec)``
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
|
||||
in the install function of a ``PackageInstaller``,
|
||||
and importantly is not part of a build process, but before it. This is when
|
||||
we have just newly grabbed the task, and are preparing to install. If you
|
||||
write a hook of this type, you should provide the spec to it.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def on_install_start(spec):
|
||||
"""On start of an install, we want to...
|
||||
"""
|
||||
print('on_install_start')
|
||||
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
``on_install_success(spec)``
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run on a successful install, and is also run inside the build
|
||||
process, akin to ``post_install``. The main difference is that this hook
|
||||
is run outside of the context of the stage directory, meaning after the
|
||||
build stage has been removed and the user is alerted that the install was
|
||||
successful. If you need to write a hook that is run on success of a particular
|
||||
phase, you should use ``on_phase_success``.
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
``on_install_failure(spec)``
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run given an install failure that happens outside of the build
|
||||
subprocess, but somewhere in ``installer.py`` when something else goes wrong.
|
||||
If you need to write a hook that is relevant to a failure within a build
|
||||
process, you would want to instead use ``on_phase_failure``.
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
``on_install_cancel(spec)``
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
The same, but triggered if a spec install is cancelled for any reason.
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""
|
||||
``on_phase_success(pkg, phase_name, log_file)``
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run within the install subprocess, and specifically when a phase
|
||||
successfully finishes. Since we are interested in the package, the name of
|
||||
the phase, and any output from it, we require:
|
||||
|
||||
- **pkg**: the package variable, which also has the attached spec at ``pkg.spec``
|
||||
- **phase_name**: the name of the phase that was successful (e.g., configure)
|
||||
- **log_file**: the path to the file with output, in case you need to inspect or otherwise interact with it.
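A sketch of a hook with this three-argument signature (the body is illustrative only):

.. code-block:: python

   def on_phase_success(pkg, phase_name, log_file):
       # pkg.spec carries the spec; phase_name is e.g. "configure".
       print("{0}: {1} succeeded, log in {2}".format(pkg.spec.name, phase_name, log_file))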
"""""""""""""""""""""""""""""""""""""""""""""
|
||||
``on_phase_error(pkg, phase_name, log_file)``
|
||||
"""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
In the case of an error during a phase, we might want to trigger some event
|
||||
with a hook, and this is the purpose of this particular hook. Akin to
|
||||
``on_phase_success`` we require the same variables - the package that failed,
|
||||
the name of the phase, and the log file where we might find errors.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
@@ -142,21 +142,6 @@ user's prompt to begin with the environment name in brackets.
   $ spack env activate -p myenv
   [myenv] $ ...

-The ``activate`` command can also be used to create a new environment, if it is
-not already defined, by adding the ``--create`` flag. Managed and anonymous
-environments, anonymous environments are explained in the next section,
-can both be created using the same flags that `spack env create` accepts.
-If an environment already exists then spack will simply activate it and ignore the
-create specific flags.
-
-.. code-block:: console
-
-   $ spack env activate --create -p myenv
-   # ...
-   # [creates if myenv does not exist yet]
-   # ...
-   [myenv] $ ...
-
To deactivate an environment, use the command:

.. code-block:: console
@@ -416,23 +401,6 @@ that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
and eventually committed and pushed to the upstream git repo.

-If the package being developed supports out-of-source builds then users can use the
-``--build_directory`` flag to control the location and name of the build directory.
-This is a shortcut to set the ``package_attributes:build_directory`` in the
-``packages`` configuration (see :ref:`assigning-package-attributes`).
-The supplied location will become the build-directory for that package in all future builds.
-
-.. warning::
-   Potential pitfalls of setting the build directory
-   Spack does not check for out-of-source build compatibility with the packages and
-   so the onerous of making sure the package supports out-of-source builds is on
-   the user.
-   For example, most ``autotool`` and ``makefile`` packages do not support out-of-source builds
-   while all ``CMake`` packages do.
-   Understanding these nuances are on the software developers and we strongly encourage
-   developers to only redirect the build directory if they understand their package's
-   build-system.
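For reference, the ``package_attributes:build_directory`` setting that this flag
abbreviates would look roughly as follows; package name and path are placeholders:

.. code-block:: yaml

   packages:
     mpileaks:
       package_attributes:
         build_directory: /path/to/build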
^^^^^^^
Loading
^^^^^^^
@@ -489,11 +457,11 @@ a ``packages.yaml`` file) could contain:

.. code-block:: yaml

   spack:
-     # ...
+     ...
     packages:
       all:
         compiler: [intel]
-     # ...
+     ...

This configuration sets the default compiler for all packages to
``intel``.
@@ -839,7 +807,7 @@ directories.

.. code-block:: yaml

   spack:
-     # ...
+     ...
     view:
       mpis:
         root: /path/to/view

@@ -883,7 +851,7 @@ automatically named ``default``, so that

.. code-block:: yaml

   spack:
-     # ...
+     ...
     view: True

is equivalent to

@@ -891,7 +859,7 @@ is equivalent to

.. code-block:: yaml

   spack:
-     # ...
+     ...
     view:
       default:
         root: .spack-env/view

@@ -901,7 +869,7 @@ and

.. code-block:: yaml

   spack:
-     # ...
+     ...
     view: /path/to/view

is equivalent to

@@ -909,7 +877,7 @@ is equivalent to

.. code-block:: yaml

   spack:
-     # ...
+     ...
     view:
       default:
         root: /path/to/view
@@ -623,7 +623,7 @@ Fortran.

   compilers:
   - compiler:
-     # ...
+     ...
      paths:
        cc: /usr/bin/clang
        cxx: /usr/bin/clang++
@@ -10,7 +10,7 @@ Modules (modules.yaml)
======================

The use of module systems to manage user environment in a controlled way
-is a common practice at HPC centers that is sometimes embraced also by
+is a common practice at HPC centers that is often embraced also by
individual programmers on their development machines. To support this
common practice Spack integrates with `Environment Modules
<http://modules.sourceforge.net/>`_ and `Lmod
@@ -21,38 +21,14 @@ Modules are one of several ways you can use Spack packages. For other
options that may fit your use case better, you should also look at
:ref:`spack load <spack-load>` and :ref:`environments <environments>`.

------------
-Quick start
------------
+----------------------------
+Using module files via Spack
+----------------------------

-In the current version of Spack, module files are not generated by default. To get started, you
-can generate module files for all currently installed packages by running either
-
-.. code-block:: console
-
-   $ spack module tcl refresh
-
-or
-
-.. code-block:: console
-
-   $ spack module lmod refresh
-
-Spack can also generate module files for all future installations automatically through the
-following configuration:
-
-.. code-block:: console
-
-   $ spack config add modules:default:enable:[tcl]
-
-or
-
-.. code-block:: console
-
-   $ spack config add modules:default:enable:[lmod]
-
-Assuming you have a module system installed, you should now be able to use the ``module`` command
-to interact with them:
+If you have installed a supported module system you should be able to
+run ``module avail`` to see what module
+files have been installed. Here is sample output of those programs,
+showing lots of installed packages:

.. code-block:: console
@@ -89,17 +65,33 @@ scheme used at your site.
Module file customization
-------------------------

Module files are generated by post-install hooks after the successful
installation of a package.

+.. note::
+
+   Spack only generates modulefiles when a package is installed. If
+   you attempt to install a package and it is already installed, Spack
+   will not regenerate modulefiles for the package. This may lead to
+   inconsistent modulefiles if the Spack module configuration has
+   changed since the package was installed, either by editing a file
+   or changing scopes or environments.
+
+   Later in this section there is a subsection on :ref:`regenerating
+   modules <cmd-spack-module-refresh>` that will allow you to bring
+   your modules to a consistent state.
+
The table below summarizes the essential information associated with
the different file formats that can be generated by Spack:

-+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
-|           | Hierarchical | **Default root directory**   | **Default template file**                    | **Compatible tools** |
-+===========+==============+==============================+==============================================+======================+
-| ``tcl``   | No           | share/spack/modules          | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod    |
-+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
-| ``lmod``  | Yes          | share/spack/lmod             | share/spack/templates/modules/modulefile.lua | Lmod                 |
-+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
++-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
+|                             | **Hook name**      | **Default root directory**    | **Default template file**                    | **Compatible tools** |
++=============================+====================+===============================+==============================================+======================+
+| **Tcl - Non-Hierarchical**  | ``tcl``            | share/spack/modules           | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod    |
++-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
+| **Lua - Hierarchical**      | ``lmod``           | share/spack/lmod              | share/spack/templates/modules/modulefile.lua | Lmod                 |
++-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+

Spack ships with sensible defaults for the generation of module files, but
@@ -110,7 +102,7 @@ In general you can override or extend the default behavior by:
2. writing specific rules in the ``modules.yaml`` configuration file
3. writing your own templates to override or extend the defaults

-The former method lets you express changes in the run-time environment
+The former method let you express changes in the run-time environment
that are needed to use the installed software properly, e.g. injecting variables
from language interpreters into their extensions. The latter two instead permit to
fine tune the filesystem layout, content and creation of module files to meet
@@ -118,62 +110,79 @@ site specific conventions.

.. _overide-api-calls-in-package-py:

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting environment variables dynamically in ``package.py``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Override API calls in ``package.py``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-There are two methods that you can implement in any ``package.py`` to dynamically affect the
-content of the module files generated by Spack. The most important one is
-``setup_run_environment``, which can be used to set environment variables in the module file that
-depend on the spec:
+There are two methods that you can override in any ``package.py`` to affect the
+content of the module files generated by Spack. The first one:

.. code-block:: python

   def setup_run_environment(self, env):
-      if self.spec.satisfies("+foo"):
-          env.set("FOO", "bar")
+      pass

-The second, less commonly used, is ``setup_dependent_run_environment(self, env, dependent_spec)``,
-which allows a dependency to set variables in the module file of its dependents. This is typically
-used in packages like ``python``, ``r``, or ``perl`` to prepend the dependent's prefix to the
-search path of the interpreter (``PYTHONPATH``, ``R_LIBS``, ``PERL5LIB`` resp.), so it can locate
-the packages at runtime.
-
-For example, a simplified version of the ``python`` package could look like this:
+can alter the content of the module file associated with the same package where it is overridden.
+The second method:

.. code-block:: python

   def setup_dependent_run_environment(self, env, dependent_spec):
-      if dependent_spec.package.extends(self.spec):
-          env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
+      pass

-and would make any package that ``extends("python")`` have its library directory added to the
-``PYTHONPATH`` environment variable in the module file. It's much more convenient to set this
-variable here, than to repeat it in every Python extension's ``setup_run_environment`` method.
+can instead inject run-time environment modifications in the module files of packages
+that depend on it. In both cases you need to fill ``env`` with the desired
+list of environment modifications.

+.. admonition:: The ``r`` package and callback APIs
+
+   An example in which it is crucial to override both methods
+   is given by the ``r`` package. This package installs libraries and headers
+   in non-standard locations and it is possible to prepend the appropriate directory
+   to the corresponding environment variables:
+
+   ================== =================================
+   LD_LIBRARY_PATH    ``self.prefix/rlib/R/lib``
+   PKG_CONFIG_PATH    ``self.prefix/rlib/pkgconfig``
+   ================== =================================
+
+   with the following snippet:
+
+   .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
+      :pyobject: R.setup_run_environment
+
+   The ``r`` package also knows which environment variable should be modified
+   to make language extensions provided by other packages available, and modifies
+   it appropriately in the override of the second method:
+
+   .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
+      :pyobject: R.setup_dependent_run_environment

.. _modules-yaml:
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-The ``modules.yaml`` config file and module sets
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Write a configuration file
+^^^^^^^^^^^^^^^^^^^^^^^^^^

-The configuration files that control module generation behavior are named ``modules.yaml``. The
-default configuration looks like this:
+The configuration files that control module generation behavior
+are named ``modules.yaml``. The default configuration:

.. literalinclude:: _spack_root/etc/spack/defaults/modules.yaml
   :language: yaml

-You can define one or more **module sets**, each of which can be configured separately with regard
-to install location, naming scheme, inclusion and exclusion, autoloading, et cetera.
+activates the hooks to generate ``tcl`` module files and inspects
+the installation folder of each package for the presence of a set of subdirectories
+(``bin``, ``man``, ``share/man``, etc.). If any is found its full path is prepended
+to the environment variables listed below the folder name.

-The default module set is aptly named ``default``. All
-:ref:`Spack commands that operate on modules <maintaining-module-files>` apply to the ``default``
-module set, unless another module set is specified explicitly (with the ``--name`` flag).
+Spack modules can be configured for multiple module sets. The default
+module set is named ``default``. All Spack commands which operate on
+modules default to apply the ``default`` module set, but can be
+applied to any module set in the configuration.
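Schematically, a configuration with more than one module set might look like
this; the second set's name and root are hypothetical:

.. code-block:: yaml

   modules:
     default:
       enable: [tcl]
     site_lmod:
       enable: [lmod]
       roots:
         lmod: /opt/site/lmod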
-^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""
Changing the modules root
-^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""

As shown in the table above, the default module root for ``lmod`` is
``$spack/share/spack/lmod`` and the default root for ``tcl`` is
@@ -189,7 +198,7 @@ set by changing the ``roots`` key of the configuration.
     my_custom_lmod_modules:
       roots:
         lmod: /path/to/install/custom/lmod/modules
-     # ...
+     ...

This configuration will create two module sets. The default module set
will install its ``tcl`` modules to ``/path/to/install/tcl/modules``
@@ -215,32 +224,25 @@ location could be confusing to users of your modules. In the next
section, we will discuss enabling and disabling module types (module
file generators) for each module set.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically generating module files
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
""""""""""""""""""""
Activate other hooks
""""""""""""""""""""

Spack can be configured to automatically generate module files as part of package installation.
This is done by adding the desired module systems to the ``enable`` list.
Any other module file generator shipped with Spack can be activated adding it to the
list under the ``enable`` key in the module file. Currently the only generator that
is not active by default is ``lmod``, which produces hierarchical lua module files.

Each module system can then be configured separately. In fact, you should list configuration
options that affect a particular type of module files under a top level key corresponding
to the generator being customized:

.. code-block:: yaml

   modules:
     default:
       enable:
         - tcl
         - lmod

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuring ``tcl`` and ``lmod`` modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can configure the behavior of either module system separately, under a key corresponding to
the generator being customized:

.. code-block:: yaml

   modules:
     default:
       - tcl
       - lmod
     tcl:
       # contains environment modules specific customizations
     lmod:
@@ -251,70 +253,16 @@ either change the layout of the module files on the filesystem, or they will aff
their content. For the latter point it is possible to use anonymous specs
to fine tune the set of packages on which the modifications should be applied.

-.. _autoloading-dependencies:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Autoloading and hiding dependencies
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-A module file should set the variables that are needed for an application to work. But since an
-application often has many dependencies, where should all the environment variables for those be
-set? In Spack the rule is that each package sets the runtime variables that are needed by the
-package itself, and no more. This way, dependencies can be loaded standalone too, and duplication
-of environment variables is avoided.
-
-That means however that if you want to use an application, you need to load the modules for all its
-dependencies. Of course this is not something you would want users to do manually.
-
-Since Spack knows the dependency graph of every package, it can easily generate module files that
-automatically load the modules for its dependencies recursively. It is enabled by default for both
-Lmod and Environment Modules under the ``autoload: direct`` config option. The former system has
-builtin support through the ``depends_on`` function, the latter simply uses a ``module load``
-statement. Both module systems (at least in newer versions) do reference counting, so that if a
-module is loaded by two different modules, it will only be unloaded after the others are.
-
-The ``autoload`` key accepts the values ``none``, ``direct``, and ``all``. To disable it, use
-``none``, and to enable, it's best to stick to ``direct``, which only autoloads the direct link and
-run type dependencies, relying on recursive autoloading to load the rest.
-
-A common complaint about autoloading is the large number of modules that are visible to the user.
-Spack has a solution for this as well: ``hide_implicits: true``. This ensures that only those
-packages you've explicitly installed are exposed by ``module avail``, but still allows for
-autoloading of hidden dependencies. Lmod should support hiding implicits in general, while
-Environment Modules requires version 4.7 or higher.
-
-.. note::
-   If supported by your module system, we highly encourage the following configuration that enables
-   autoloading and hiding of implicits. It ensures all runtime variables are set correctly,
-   including those for dependencies, without overwhelming the user with a large number of available
-   modules. Further, it makes it easier to get readable module names without collisions, see the
-   section below on :ref:`modules-projections`.
-
-   .. code-block:: yaml
-
-      modules:
-        default:
-          tcl:
-            hide_implicits: true
-            all:
-              autoload: direct
-          lmod:
-            hide_implicits: true
-            all:
-              autoload: direct

.. _anonymous_specs:

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Setting environment variables for selected packages in config
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""
+Selection by anonymous specs
+""""""""""""""""""""""""""""

-In the configuration file you can filter particular specs, and make further changes to the
-environment variables that go into their module files. This is very powerful when you want to avoid
-:ref:`modifying the package itself <overide-api-calls-in-package-py>`, or when you want to set
-certain variables on multiple selected packages at once.
-
-For instance, in the snippet below:
+In the configuration file you can use *anonymous specs* (i.e. specs
+that **are not required to have a root package** and are thus used just
+to express constraints) to apply certain modifications on a selected set
+of the installed software. For instance, in the snippet below:

.. code-block:: yaml
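The snippet referred to here falls outside the hunk; as a stand-in, a filter of
this kind takes roughly the following shape, echoing the ``FOOBAR`` variable
discussed next (the spec and nesting under ``tcl`` are assumptions):

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           environment:
             set:
               FOOBAR: '1'
         ^python:
           environment:
             unset:
               - FOOBAR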
@@ -357,28 +305,12 @@ the variable ``FOOBAR`` will be unset.
.. note::
   Order does matter
   The modifications associated with the ``all`` keyword are always evaluated
-   first, no matter where they appear in the configuration file. All the other changes to
-   environment variables for matching specs are evaluated from top to bottom.
+   first, no matter where they appear in the configuration file. All the other
+   spec constraints are instead evaluated top to bottom.

-.. warning::
-
-   As general advice, it's often better to set as few unnecessary variables as possible. For
-   example, the following seemingly innocent and potentially useful configuration
-
-   .. code-block:: yaml
-
-      all:
-        environment:
-          set:
-            "{name}_ROOT": "{prefix}"
-
-   sets ``BINUTILS_ROOT`` to its prefix in modules for ``binutils``, which happens to break
-   the ``gcc`` compiler: it uses this variable as its default search path for certain object
-   files and libraries, and by merely setting it, everything fails to link.
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""""""""""
Exclude or include specific module files
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""""""""""

You can use anonymous specs also to prevent module files from being written or
to force them to be written. Consider the case where you want to hide from users

@@ -398,19 +330,14 @@ you will prevent the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
or any ``llvm`` installation.

-It is safe to combine ``exclude`` and ``autoload``
-:ref:`mentioned above <autoloading-dependencies>`. When ``exclude`` prevents a module file to be
-generated for a dependency, the ``autoload`` feature will simply not generate a statement to load
-it.
.. _modules-projections:

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""""""""
Customize the naming of modules
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""""""""

-The names of environment modules generated by Spack are not always easy to
+The names of environment modules generated by spack are not always easy to
fully comprehend due to the long hash in the name. There are three module
configuration options to help with that. The first is a global setting to
adjust the hash length. It can be set anywhere from 0 to 32 and has a default

@@ -426,13 +353,6 @@ shows how to set hash length in the module file names:

   tcl:
     hash_length: 7

-.. tip::
-
-   Using ``hide_implicits: true`` (see :ref:`autoloading-dependencies`) vastly reduces the number
-   modules exposed to the user. The hidden modules always contain the hash in their name, and are
-   not influenced by the ``hash_length`` setting. Hidden implicits thus make it easier to use a
-   short hash length or no hash at all, without risking name conflicts.
-
To help make module names more readable, and to help alleviate name conflicts
with a short hash, one can use the ``suffixes`` option in the modules
configuration file. This option will add strings to modules that match a spec.

@@ -445,12 +365,12 @@ For instance, the following config options,

   tcl:
     all:
       suffixes:
-        ^python@3.12: 'python-3.12'
+        ^python@2.7.12: 'python-2.7.12'
        ^openblas: 'openblas'

-will add a ``python-3.12`` version string to any packages compiled with
-Python matching the spec, ``python@3.12``. This is useful to know which
-version of Python a set of Python extensions is associated with. Likewise, the
+will add a ``python-2.7.12`` version string to any packages compiled with
+python matching the spec, ``python@2.7.12``. This is useful to know which
+version of python a set of python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
@@ -548,11 +468,41 @@ that are already in the Lmod hierarchy.
For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_.

+""""""""""""""""""""""
+Select default modules
+""""""""""""""""""""""
+
+By default, when multiple modules of the same name share a directory,
+the highest version number will be the default module. This behavior
+of the ``module`` command can be overridden with a symlink named
+``default`` to the desired default module. If you wish to configure
+default modules with Spack, add a ``defaults`` key to your modules
+configuration:
+
+.. code-block:: yaml
+
+   modules:
+     my-module-set:
+       tcl:
+         defaults:
+           - gcc@10.2.1
+           - hdf5@1.2.10+mpi+hl%gcc
+
+These defaults may be arbitrarily specific. For any package that
+satisfies a default, Spack will generate the module file in the
+appropriate path, and will generate a default symlink to the module
+file as well.
+
+.. warning::
+   If Spack is configured to generate multiple default packages in the
+   same directory, the last modulefile to be generated will be the
+   default module.
.. _customize-env-modifications:

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""""""""""""
Customize environment modifications
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+"""""""""""""""""""""""""""""""""""

You can control which prefixes in a Spack package are added to
environment variables with the ``prefix_inspections`` section; this

@@ -650,9 +600,9 @@ stack to users who are likely to inspect the modules to find full
paths to software, when it is desirable to present the users with a
simpler set of paths than those generated by the Spack install tree.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""
Filter out environment modifications
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""""""""""""""

Modifications to certain environment variables in module files are there by
default, for instance because they are generated by prefix inspections.

@@ -672,37 +622,49 @@ do so by using the ``exclude_env_vars``:
The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.
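The ``exclude_env_vars`` block itself lies outside the hunk; a sketch of the
shape it takes (module set and nesting are assumptions):

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           filter:
             exclude_env_vars: ["CPATH", "LIBRARY_PATH"]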
-^^^^^^^^^^^^^^^^^^^^^^
-Select default modules
-^^^^^^^^^^^^^^^^^^^^^^
-
-By default, when multiple modules of the same name share a directory,
-the highest version number will be the default module. This behavior
-of the ``module`` command can be overridden with a symlink named
-``default`` to the desired default module. If you wish to configure
-default modules with Spack, add a ``defaults`` key to your modules
-configuration:
+.. _autoloading-dependencies:
+
+"""""""""""""""""""""
+Autoload dependencies
+"""""""""""""""""""""
+
+Often it is required for a module to have its (transient) dependencies loaded as well.
+One example where this is useful is when one package needs to use executables provided
+by its dependency; when the dependency is autoloaded, the executable will be in the
+PATH. Similarly for scripting languages such as Python, packages and their dependencies
+have to be loaded together.
+
+Autoloading is enabled by default for Lmod and Environment Modules. The former
+has builtin support for through the ``depends_on`` function. The latter uses
+``module load`` statement to load and track dependencies.
+
+Autoloading can also be enabled conditionally:

.. code-block:: yaml

-   modules:
-     my-module-set:
-       tcl:
-         defaults:
-           - gcc@10.2.1
-           - hdf5@1.2.10+mpi+hl%gcc
+   modules:
+     default:
+       tcl:
+         all:
+           autoload: none
+         ^python:
+           autoload: direct

-These defaults may be arbitrarily specific. For any package that
-satisfies a default, Spack will generate the module file in the
-appropriate path, and will generate a default symlink to the module
-file as well.
+The configuration file above will produce module files that will
+load their direct dependencies if the package installed depends on ``python``.
+The allowed values for the ``autoload`` statement are either ``none``,
+``direct`` or ``all``.

-.. warning::
-   If Spack is configured to generate multiple default packages in the
-   same directory, the last modulefile to be generated will be the
-   default module.
-
-.. _maintaining-module-files:
+.. note::
+   Tcl prerequisites
+   In the ``tcl`` section of the configuration file it is possible to use
+   the ``prerequisites`` directive that accepts the same values as
+   ``autoload``. It will produce module files that have a ``prereq``
+   statement, which autoloads dependencies on Environment Modules when its
+   ``auto_handling`` configuration option is enabled. If Environment Modules
+   is installed with Spack, ``auto_handling`` is enabled by default starting
+   version 4.2. Otherwise it is enabled by default since version 5.0.
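Concretely, such a directive might be written as follows, mirroring the
``autoload`` values above (a sketch, not from this diff):

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           prerequisites: direct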
------------------------
Maintaining Module Files
@@ -487,56 +487,6 @@ present. For instance with a configuration like:

you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Conflicts and strong preferences
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If the semantic of requirements is too strong, you can also express "strong preferences" and "conflicts"
-from configuration files:
-
-.. code-block:: yaml
-
-   packages:
-     all:
-       prefer:
-         - '%clang'
-       conflict:
-         - '+shared'
-
-The ``prefer`` and ``conflict`` sections can be used whenever a ``require`` section is allowed.
-The argument is always a list of constraints, and each constraint can be either a simple string,
-or a more complex object:
-
-.. code-block:: yaml
-
-   packages:
-     all:
-       conflict:
-         - spec: '%clang'
-           when: 'target=x86_64_v3'
-           message: 'reason why clang cannot be used'
-
-The ``spec`` attribute is mandatory, while both ``when`` and ``message`` are optional.
-
-.. note::
-
-   Requirements allow for expressing both "strong preferences" and "conflicts".
-   The syntax for doing so, though, may not be immediately clear. For
-   instance, if we want to prevent any package from using ``%clang``, we can set:
-
-   .. code-block:: yaml
-
-      packages:
-        all:
-          require:
-          - one_of: ['%clang', '@:']
-
-   Since only one of the requirements must hold, and ``@:`` is always true, the rule above is
-   equivalent to a conflict. For "strong preferences" we need to substitute the ``one_of`` policy
-   with ``any_of``.

.. _package-preferences:

-------------------
@@ -647,8 +597,6 @@ manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.

-.. _assigning-package-attributes:
-
----------------------------
Assigning Package Attributes
----------------------------

@@ -659,11 +607,10 @@ You can assign class-level attributes in the configuration:

   packages:
     mpileaks:
       package_attributes:
         # Override existing attributes
         url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
         # ... or add new ones
         x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
@@ -6979,18 +6979,3 @@ you probably care most about are:
You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.

-Many of the licenses that are currently in the spack repositories have been
-automatically determined. While this is great for bulk adding license
-information and is most likely correct, there are sometimes edge cases that
-require manual intervention. To determine which licenses are validated and
-which are not, there is the `checked_by` parameter in the license directive:
-
-.. code-block:: python
-
-   license("<license>", when="<when>", checked_by="<github username>")
-
-When you have validated a github license, either when doing so explicitly or
-as part of packaging a new package, please set the `checked_by` parameter
-to your Github username to signal that the license has been manually
-verified.
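As a concrete instance of the directive, with a placeholder username:

.. code-block:: python

   license("Apache-2.0 WITH LLVM-exception", checked_by="example-user")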
@@ -810,7 +810,7 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs

.. code-block:: yaml

   spack:
-     # ...
+     ...
     ci:
       pipeline-gen:
       - build-job:
@@ -17,7 +17,7 @@ experimental software separately from the built-in repository. Spack
allows you to configure local repositories using either the
``repos.yaml`` or the ``spack repo`` command.

-A package repository is a directory structured like this::
+A package repository a directory structured like this::

   repo/
     repo.yaml
@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.0
+python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
-urllib3==2.2.0
-pytest==8.0.1
+urllib3==2.1.0
+pytest==7.4.4
isort==5.13.2
-black==24.2.0
+black==23.12.1
flake8==7.0.0
mypy==1.8.0
@@ -171,7 +171,7 @@ def polite_path(components: Iterable[str]):
@memoized
def _polite_antipattern():
    # A regex of all the characters we don't want in a filename
-    return re.compile(r"[^A-Za-z0-9_+.-]")
+    return re.compile(r"[^A-Za-z0-9_.-]")


def polite_filename(filename: str) -> str:
@@ -920,34 +920,28 @@ def get_filetype(path_name):
    return output.strip()


-def has_shebang(path):
-    """Returns whether a path has a shebang line. Returns False if the file cannot be opened."""
-    try:
-        with open(path, "rb") as f:
-            return f.read(2) == b"#!"
-    except OSError:
-        return False
-
-
@system_path_filter
def is_nonsymlink_exe_with_shebang(path):
-    """Returns whether the path is an executable regular file with a shebang. Returns False too
-    when the path is a symlink to a script, and also when the file cannot be opened."""
+    """
+    Returns whether the path is an executable script with a shebang.
+    Return False when the path is a *symlink* to an executable script.
+    """
    try:
        st = os.lstat(path)
-    except OSError:
-        return False
-    # Should not be a symlink
-    if stat.S_ISLNK(st.st_mode):
-        return False
-
-    # Should be executable
-    if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
-        return False
-
-    return has_shebang(path)
+        # Should not be a symlink
+        if stat.S_ISLNK(st.st_mode):
+            return False
+        # Should be executable
+        if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
+            return False
+        # Should start with a shebang
+        with open(path, "rb") as f:
+            return f.read(2) == b"#!"
+    except (IOError, OSError):
+        return False
@@ -1383,89 +1377,120 @@ def traverse_tree(
|
||||
yield (source_path, dest_path)
|
||||
|
||||
|
||||
def lexists_islink_isdir(path):
|
||||
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
|
||||
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
|
||||
if sys.platform == "win32":
|
||||
if not os.path.lexists(path):
|
||||
return False, False, False
|
||||
return os.path.lexists(path), islink(path), os.path.isdir(path)
|
||||
# First try to lstat, so we know if it's a link or not.
|
||||
try:
|
||||
lst = os.lstat(path)
|
||||
except (IOError, OSError):
|
||||
return False, False, False
|
||||
|
||||
is_link = stat.S_ISLNK(lst.st_mode)
|
||||
|
||||
# Check whether file is a dir.
|
||||
if not is_link:
|
||||
is_dir = stat.S_ISDIR(lst.st_mode)
|
||||
return True, is_link, is_dir
|
||||
|
||||
# Check whether symlink points to a dir.
|
||||
try:
|
||||
st = os.stat(path)
|
||||
is_dir = stat.S_ISDIR(st.st_mode)
|
||||
except (IOError, OSError):
|
||||
# Dangling symlink (i.e. it lexists but not exists)
|
||||
is_dir = False
|
||||
|
||||
return True, is_link, is_dir
|
||||
|
||||
|
||||
class BaseDirectoryVisitor:
|
||||
"""Base class and interface for :py:func:`visit_directory_tree`."""
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
def visit_file(self, root, rel_path, depth):
|
||||
"""Handle the non-symlink file at ``os.path.join(root, rel_path)``
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current file from ``root``
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current file from ``root``
|
||||
depth (int): depth of current file from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
def visit_symlinked_file(self, root: str, rel_path: str, depth) -> None:
|
||||
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``. Note: ``rel_path`` is
|
||||
the location of the symlink, not to what it is pointing to. The symlink may be dangling.
|
||||
def visit_symlinked_file(self, root, rel_path, depth):
|
||||
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``.
|
||||
Note: ``rel_path`` is the location of the symlink, not to what it is
|
||||
pointing to. The symlink may be dangling.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current symlink from ``root``
|
||||
depth: depth of current symlink from the ``root`` directory"""
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current symlink from ``root``
|
||||
depth (int): depth of current symlink from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
"""Return True from this function to recurse into the directory at
|
||||
os.path.join(root, rel_path). Return False in order not to recurse further.
|
||||
|
||||
Parameters:
|
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory

Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False

def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""Return ``True`` to recurse into the symlinked directory and ``False`` in order not to.
Note: ``rel_path`` is the path to the symlink itself. Following symlinked directories
blindly can cause infinite recursion due to cycles.
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""Return ``True`` to recurse into the symlinked directory and ``False`` in
order not to. Note: ``rel_path`` is the path to the symlink itself.
Following symlinked directories blindly can cause infinite recursion due to
cycles.

Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory

Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False

def after_visit_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.

Parameters:
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory"""
pass

def after_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_symlinked_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.

Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory"""
pass


def visit_directory_tree(
root: str, visitor: BaseDirectoryVisitor, rel_path: str = "", depth: int = 0
):
"""Recurses the directory root depth-first through a visitor pattern using the interface from
:py:class:`BaseDirectoryVisitor`
def visit_directory_tree(root, visitor, rel_path="", depth=0):
"""Recurses the directory root depth-first through a visitor pattern using the
interface from :py:class:`BaseDirectoryVisitor`

Parameters:
root: path of directory to recurse into
visitor: what visitor to use
rel_path: current relative path from the root
depth: current depth from the root
root (str): path of directory to recurse into
visitor (BaseDirectoryVisitor): what visitor to use
rel_path (str): current relative path from the root
depth (int): current depth from the root
"""
dir = os.path.join(root, rel_path)
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
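To make the visitor interface above concrete, here is a minimal sketch of a visitor that records every symlinked file in a tree without following directory symlinks. The class and function come from the diff above; the import path and the example root directory are assumptions for illustration.

import os
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree

class SymlinkCollector(BaseDirectoryVisitor):
    # Collect relative paths of all symlinked files in the tree.
    def __init__(self):
        self.symlinks = []

    def visit_file(self, root, rel_path, depth):
        pass  # ignore regular files

    def visit_symlinked_file(self, root, rel_path, depth):
        self.symlinks.append(rel_path)

    def before_visit_dir(self, root, rel_path, depth):
        return True  # recurse into real directories

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        return False  # never follow dir symlinks: avoids cycles, per the docstring

visitor = SymlinkCollector()
visit_directory_tree("/opt/example", visitor)  # hypothetical root
print(visitor.symlinks)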
@@ -1473,19 +1498,26 @@ def visit_directory_tree(
for f in dir_entries:
rel_child = os.path.join(rel_path, f.name)
islink = f.is_symlink()
# On Windows, symlinks to directories are distinct from symlinks to files, and it is
# possible to create a broken symlink to a directory (e.g. using os.symlink without
# `target_is_directory=True`); invoking `isdir` on a symlink on Windows that is broken in
# this manner will result in an error. In this case we can work around the issue by reading
# the target and resolving the directory ourselves
# On Windows, symlinks to directories are distinct from
# symlinks to files, and it is possible to create a
# broken symlink to a directory (e.g. using os.symlink
# without `target_is_directory=True`); invoking `isdir`
# on a symlink on Windows that is broken in this manner
# will result in an error. In this case we can work around
# the issue by reading the target and resolving the
# directory ourselves
try:
isdir = f.is_dir()
except OSError as e:
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and evaluate file vs directory
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
# link_target might be relative, but resolve_link_target_relative_to_the_link
# will ensure that, if so, it is relative to the CWD and therefore makes sense
# link_target might be relative, but
# resolve_link_target_relative_to_the_link
# will ensure that, if so, it is relative
# to the CWD and therefore
# makes sense
isdir = os.path.isdir(link_target)
else:
raise e

@@ -8,7 +8,7 @@
import filecmp
import os
import shutil
from typing import Callable, Dict, List, Optional, Tuple
from collections import OrderedDict

import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, touch, traverse_tree
@@ -51,32 +51,32 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""

def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
def __init__(self, ignore=None):
self.ignore = ignore if ignore is not None else lambda f: False

# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
# When mapping <src root> to <dst root>/<projection>, we need
# to prepend the <projection> bit to the relative path in the
# destination dir.
self.projection = ""

# Two files f and g conflict if they are not os.path.samefile(f, g) and they are both
# projected to the same destination file. These conflicts are not necessarily fatal, and
# can be resolved or ignored. For example <prefix>/LICENSE or
# <site-packages>/<namespace>/__init__.py conflicts can be ignored.
self.file_conflicts: List[MergeConflict] = []
# When a file blocks another file, the conflict can sometimes
# be resolved / ignored (e.g. <prefix>/LICENSE or
# <site-packages>/<namespace>/__init__.py conflicts can be
# ignored).
self.file_conflicts = []

# When we have to create a dir where a file is, or a file where a dir is, we have fatal
# errors, listed here.
self.fatal_conflicts: List[MergeConflict] = []
# When we have to create a dir where a file is, or a file
# where a dir is, we have fatal errors, listed here.
self.fatal_conflicts = []

# What directories we have to make; this is an ordered dict, so that we have a fast lookup
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# What directories we have to make; this is an ordered set,
# so that we have a fast lookup and can run mkdir in order.
self.directories = OrderedDict()

# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel)
self.files = OrderedDict()

def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
"""
@@ -104,7 +104,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
self.directories[proj_rel_path] = (root, rel_path)
return True

def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Replace symlinked dirs with actual directories when possible in low depths,
otherwise handle it as a file (i.e. we link to the symlink).
@@ -136,56 +136,40 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
self.visit_file(root, rel_path, depth)
return False

def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
def visit_file(self, root, rel_path, depth):
proj_rel_path = os.path.join(self.projection, rel_path)

if self.ignore(rel_path):
pass
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
src_a_root, src_a_relpath = self.directories[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.

src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)

try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False

if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
# In some cases we can resolve file-file conflicts
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.file_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return

if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)

)
else:
# Otherwise register this file to be linked.
self.files[proj_rel_path] = (root, rel_path)

def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)

def set_projection(self, projection: str) -> None:
def set_projection(self, projection):
self.projection = os.path.normpath(projection)

# Todo, is this how to check in general for empty projection?
@@ -213,19 +197,24 @@ def set_projection(self, projection: str) -> None:


class DestinationMergeVisitor(BaseDirectoryVisitor):
"""DestinationMergeVisitor takes a SourceMergeVisitor and:
"""DestinationMergeVisitor takes a SourceMergeVisitor
and:

a. registers additional conflicts when merging to the destination prefix
b. removes redundant mkdir operations when directories already exist in the destination prefix.
a. registers additional conflicts when merging
to the destination prefix
b. removes redundant mkdir operations when
directories already exist in the destination
prefix.

This also makes sure that symlinked directories in the target prefix will never be merged with
This also makes sure that symlinked directories
in the target prefix will never be merged with
directories in the sources directories.
"""

def __init__(self, source_merge_visitor: SourceMergeVisitor):
def __init__(self, source_merge_visitor):
self.src = source_merge_visitor

def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if rel_path in self.src.files:
@@ -247,7 +236,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# don't descend into it.
return False

def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Symlinked directories in the destination prefix should
be seen as files; we should not accidentally merge
@@ -273,7 +262,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
# Never descend into symlinked target dirs.
return False

def visit_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_file(self, root, rel_path, depth):
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
@@ -291,7 +280,7 @@ def visit_file(self, root: str, rel_path: str, depth: int) -> None:
)
)

def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth)
@@ -244,7 +244,7 @@ def _search_duplicate_specs_in_externals(error_cls):
+ lines
+ ["as they might result in non-deterministic hashes"]
)
except (TypeError, AttributeError):
except TypeError:
details = []

errors.append(error_cls(summary=error_msg, details=details))
@@ -292,6 +292,12 @@ def _avoid_mismatched_variants(error_cls):
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")

def make_error(config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])

for pkg_name in packages_yaml:
# 'all:' must be more forgiving, since it is setting defaults for everything
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
@@ -311,7 +317,7 @@ def _avoid_mismatched_variants(error_cls):
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))
continue

# Variant cannot accept this value
@@ -323,41 +329,11 @@ def _avoid_mismatched_variants(error_cls):
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
f"to the invalid value '{str(variant)}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))

return errors


@config_packages
def _wrongly_named_spec(error_cls):
"""Warns if the wrong name is used for an external spec"""
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
for pkg_name in packages_yaml:
if pkg_name == "all":
continue

externals = packages_yaml[pkg_name].get("externals", [])
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
for entry in externals:
spec = spack.spec.Spec(entry["spec"])
regular_pkg_is_wrong = not is_virtual and pkg_name != spec.name
virtual_pkg_is_wrong = is_virtual and not any(
p.name == spec.name for p in spack.repo.PATH.providers_for(pkg_name)
)
if regular_pkg_is_wrong or virtual_pkg_is_wrong:
summary = f"Wrong external spec detected for '{pkg_name}': {spec}"
errors.append(_make_config_error(entry, summary, error_cls=error_cls))
return errors


def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])


#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",
@@ -796,30 +772,10 @@ def check_virtual_with_variants(spec, msg):
except spack.repo.UnknownPackageError:
# This dependency is completely missing, so report
# and continue the analysis
summary = f"{pkg_name}: unknown package '{dep_name}' in 'depends_on' directive"
details = [f" in {filename}"]
errors.append(error_cls(summary=summary, details=details))
continue

# Check for self-referential specs similar to:
#
# depends_on("foo@X.Y", when="^foo+bar")
#
# That would allow clingo to choose whether to have foo@X.Y+bar in the graph.
problematic_edges = [
x for x in when.edges_to_dependencies(dep_name) if not x.virtuals
]
if problematic_edges and not dep.patches:
summary = (
f"{pkg_name}: dependency on '{dep.spec}' when '{when}' is self-referential"
f"{pkg_name}: unknown package '{dep_name}' in " "'depends_on' directive"
)
details = [
(
f" please specify better using '^[virtuals=...] {dep_name}', or "
f"substitute with an equivalent condition on '{pkg_name}'"
),
f" in {filename}",
]
details = [f" in {filename}"]
errors.append(error_cls(summary=summary, details=details))
continue

@@ -5,6 +5,7 @@

import codecs
import collections
import errno
import hashlib
import io
import itertools
@@ -22,7 +23,8 @@
import urllib.parse
import urllib.request
import warnings
from contextlib import closing
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError

@@ -48,7 +50,6 @@
import spack.stage
import spack.store
import spack.traverse as traverse
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
@@ -1132,46 +1133,205 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)


@contextmanager
def gzip_compressed_tarfile(path):
"""Create a reproducible, compressed tarfile"""
# Create gzip compressed tarball of the install prefix
# 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
# If the filename="" is dropped, Python will use fileobj.name instead.
# This should effectively mimic `gzip --no-name`.
# 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
name="", mode="w", fileobj=outer_checksum
) as tar:
yield tar, inner_checksum, outer_checksum

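A sketch of how this context manager is used (paths hypothetical); note which stream each checksum covers:

import tarfile  # the module referenced above

with gzip_compressed_tarfile("/tmp/example.tar.gz") as (tar, inner, outer):
    tar.add("/etc/hostname", arcname="hostname")  # any file, for illustration

# inner hashed everything written to disk: the compressed .tar.gz bytes
# outer hashed the uncompressed tar stream fed into gzip
print(inner.hexdigest(), outer.hexdigest())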
def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
"""Compute tarfile entry name as the relative path from the (system) root."""
return _path(*_path(absolute_path).parts[1:]).as_posix()

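In effect this just strips the leading root component, e.g. (hypothetical prefix):

assert _tarinfo_name("/opt/spack/gcc-12.3.0") == "opt/spack/gcc-12.3.0"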
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
"""Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
Partitioning ensures only one dir is in memory at a time, and sorting improves compression.

Args:
tar: tarfile object to add files to
prefix: absolute install prefix of spec"""
if not os.path.isabs(prefix) or not os.path.isdir(prefix):
raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
stat_key = lambda stat: (stat.st_dev, stat.st_ino)

try: # skip buildinfo file if it exists
files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
skip = lambda entry: stat_key(entry.stat(follow_symlinks=False)) in files_to_skip
except OSError:
skip = lambda entry: False
files_to_skip = []

spack.util.archive.reproducible_tarfile_from_prefix(
tar,
prefix,
# Spack <= 0.21 did not include parent directories, leading to issues when tarballs are
# used in runtimes like AWS lambda.
include_parent_directories=True,
skip=skip,
)
# First add all directories leading up to `prefix` (Spack <= 0.21 did not do this, leading to
# issues when tarballs are used in runtimes like AWS lambda). Skip the file system root.
parent_dirs = reversed(pathlib.Path(prefix).parents)
next(parent_dirs) # skip the root: slices are supported from python 3.10
for parent_dir in parent_dirs:
dir_info = tarfile.TarInfo(_tarinfo_name(str(parent_dir)))
dir_info.type = tarfile.DIRTYPE
dir_info.mode = 0o755
tar.addfile(dir_info)

dir_stack = [prefix]
while dir_stack:
dir = dir_stack.pop()

# Add the dir before its contents
dir_info = tarfile.TarInfo(_tarinfo_name(dir))
dir_info.type = tarfile.DIRTYPE
dir_info.mode = 0o755
tar.addfile(dir_info)

# Sort by name: reproducible & improves compression
with os.scandir(dir) as it:
entries = sorted(it, key=lambda entry: entry.name)

new_dirs = []
for entry in entries:
if entry.is_dir(follow_symlinks=False):
new_dirs.append(entry.path)
continue

file_info = tarfile.TarInfo(_tarinfo_name(entry.path))

s = entry.stat(follow_symlinks=False)

# Skip existing binary distribution files.
id = stat_key(s)
if id in files_to_skip:
continue

# Normalize the mode
file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755

if entry.is_symlink():
file_info.type = tarfile.SYMTYPE
file_info.linkname = os.readlink(entry.path)
tar.addfile(file_info)

elif entry.is_file(follow_symlinks=False):
# Deduplicate hardlinks
if s.st_nlink > 1:
if id in hardlink_to_tarinfo_name:
file_info.type = tarfile.LNKTYPE
file_info.linkname = hardlink_to_tarinfo_name[id]
tar.addfile(file_info)
continue
hardlink_to_tarinfo_name[id] = file_info.name

# If file not yet seen, copy it.
file_info.type = tarfile.REGTYPE
file_info.size = s.st_size

with open(entry.path, "rb") as f:
tar.addfile(file_info, f)

dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical

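The hardlink handling above keys files by (st_dev, st_ino) so that the second and later links to the same inode become LNKTYPE entries. A self-contained sketch of the same idea, with illustrative names only:

import os, tarfile

def add_with_hardlink_dedup(tar, paths):
    seen = {}  # (st_dev, st_ino) -> name of the first tar entry for that inode
    for p in paths:
        s = os.lstat(p)
        key = (s.st_dev, s.st_ino)
        info = tarfile.TarInfo(p.lstrip("/"))
        if s.st_nlink > 1 and key in seen:
            info.type = tarfile.LNKTYPE  # store as hard link to the first entry
            info.linkname = seen[key]
            tar.addfile(info)
            continue
        seen[key] = info.name
        info.size = s.st_size
        with open(p, "rb") as f:  # regular file: copy the bytes once
            tar.addfile(info, f)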
class ChecksumWriter(io.BufferedIOBase):
"""Checksum writer computes a checksum while writing to a file."""

myfileobj = None

def __init__(self, fileobj, algorithm=hashlib.sha256):
self.fileobj = fileobj
self.hasher = algorithm()
self.length = 0

def hexdigest(self):
return self.hasher.hexdigest()

def write(self, data):
if isinstance(data, (bytes, bytearray)):
length = len(data)
else:
data = memoryview(data)
length = data.nbytes

if length > 0:
self.fileobj.write(data)
self.hasher.update(data)

self.length += length

return length

def read(self, size=-1):
raise OSError(errno.EBADF, "read() on write-only object")

def read1(self, size=-1):
raise OSError(errno.EBADF, "read1() on write-only object")

def peek(self, n):
raise OSError(errno.EBADF, "peek() on write-only object")

@property
def closed(self):
return self.fileobj is None

def close(self):
fileobj = self.fileobj
if fileobj is None:
return
self.fileobj.close()
self.fileobj = None

def flush(self):
self.fileobj.flush()

def fileno(self):
return self.fileobj.fileno()

def rewind(self):
raise OSError("Can't rewind while computing checksum")

def readable(self):
return False

def writable(self):
return True

def seekable(self):
return True

def tell(self):
return self.fileobj.tell()

def seek(self, offset, whence=io.SEEK_SET):
# In principle forward seek is possible with b"0" padding,
# but this is not implemented.
if offset == 0 and whence == io.SEEK_CUR:
return
raise OSError("Can't seek while computing checksum")

def readline(self, size=-1):
raise OSError(errno.EBADF, "readline() on write-only object")

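On its own, ChecksumWriter just passes writes through and accumulates a digest; a quick sketch:

import io

buf = io.BytesIO()
w = ChecksumWriter(buf)  # defaults to hashlib.sha256
w.write(b"hello ")
w.write(b"world")
assert w.length == 11
assert buf.getvalue() == b"hello world"
print(w.hexdigest())  # sha256 of b"hello world"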
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
tar,
inner_checksum,
outer_checksum,
):
with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)

# Serialize buildinfo for the tarball
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(
name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
)
tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
tarinfo.type = tarfile.REGTYPE
tarinfo.size = len(bstring)
tarinfo.mode = 0o644
@@ -542,7 +542,7 @@ def verify_patchelf(patchelf: "spack.util.executable.Executable") -> bool:
return version >= spack.version.Version("0.13.1")


def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
def ensure_patchelf_in_path_or_raise() -> None:
"""Ensure patchelf is in the PATH or raise."""
# The old concretizer is not smart and we're doing its job: if the latest patchelf
# does not concretize because the compiler doesn't support C++17, we try to
@@ -146,7 +146,7 @@ def mypy_root_spec() -> str:

def black_root_spec() -> str:
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:24.1.0")
return _root_spec("py-black@:23.1.0")


def flake8_root_spec() -> str:
@@ -217,9 +217,6 @@ def clean_environment():
env.unset("R_HOME")
env.unset("R_ENVIRON")

env.unset("LUA_PATH")
env.unset("LUA_CPATH")

# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
# env.unset('MAKEFLAGS')
@@ -555,55 +552,58 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
"""
module = ModuleChangePropagator(pkg)

m = module

if context == Context.BUILD:
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m.make_jobs = jobs

jobs = determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.gmake = MakeExecutable("gmake", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")

# TODO: make these build deps that can be installed if not found.
module.make = MakeExecutable("make", jobs)
module.gmake = MakeExecutable("gmake", jobs)
module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
module.nmake = Executable("nmake")
module.msbuild = Executable("msbuild")
# analog to configure for win32
module.cscript = Executable("cscript")
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")

# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
module.configure = Executable("./configure")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path
module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])

# Useful directories within the prefix are encapsulated in
# a Prefix object.
module.prefix = pkg.prefix
m.prefix = pkg.prefix

# Platform-specific library suffix.
module.dso_suffix = dso_suffix
m.dso_suffix = dso_suffix

def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
compiler_path = kwargs.get("compiler", module.spack_cc)
compiler_path = kwargs.get("compiler", m.spack_cc)
compiler = Executable(compiler_path)

return _static_to_shared_library(
pkg.spec.architecture, compiler, static_lib, shared_lib, **kwargs
)

module.static_to_shared_library = static_to_shared_library
m.static_to_shared_library = static_to_shared_library

module.propagate_changes_to_mro()
@@ -972,8 +972,8 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
self.should_set_package_py_globals = (
self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
)
# In a build context, the root needs build-specific globals set.
self.needs_build_context = UseMode.ROOT
# In a build context, the root and direct build deps need build-specific globals set.
self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT

def set_all_package_py_globals(self):
"""Set the globals in modules of package.py files."""
@@ -199,8 +199,6 @@ def initconfig_mpi_entries(self):
mpiexec = "/usr/bin/srun"
else:
mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
elif hasattr(spec["mpi"].package, "mpiexec"):
mpiexec = spec["mpi"].package.mpiexec
else:
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
if not os.path.exists(mpiexec):
@@ -15,7 +15,6 @@

import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.package_base
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
@@ -32,86 +31,8 @@ def _extract_primary_generator(generator):
primary generator from the generator string which may contain an
optional secondary generator.
"""
return _primary_generator_extractor.match(generator).group(1)


def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False):
return
pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return

args.extend(
[
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
CMakeBuilder.define("Python_EXECUTABLE", python_executable),
CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
]
)


def _supports_compilation_databases(pkg: spack.package_base.PackageBase) -> bool:
"""Check if this package (and CMake) can support compilation databases."""

# CMAKE_EXPORT_COMPILE_COMMANDS only exists for CMake >= 3.5
if not pkg.spec.satisfies("^cmake@3.5:"):
return False

# CMAKE_EXPORT_COMPILE_COMMANDS is only implemented for Makefile and Ninja generators
if not (pkg.spec.satisfies("generator=make") or pkg.spec.satisfies("generator=ninja")):
return False

return True
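For illustration only, what the Python-hint helper above contributes when it applies (the package object and interpreter path are hypothetical, not from this diff):

args = []
_maybe_set_python_hints(pkg, args)  # assumes pkg.find_python_hints is True and exactly
                                    # one python build/link dependency was found
# args now ends with three CMakeBuilder.define(...) entries pointing
# PYTHON_EXECUTABLE, Python_EXECUTABLE and Python3_EXECUTABLE at that interpreter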
def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set a few default defines for CMake, depending on its version."""
cmakes = pkg.spec.dependencies("cmake", dt.BUILD)

if len(cmakes) != 1:
return

cmake = cmakes[0]

# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False

if cmake.satisfies("@3.9:"):
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
# find_package may search there. This is not what we want.

# Do not populate CMake User Package Registry
if cmake.satisfies("@3.15:"):
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
elif cmake.satisfies("@3.1:"):
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

# Do not use CMake User/System Package Registry
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
if cmake.satisfies("@3.16:"):
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
elif cmake.satisfies("@3.1:3.15"):
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

# Export a compilation database if supported.
if _supports_compilation_databases(pkg):
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
primary_generator = _primary_generator_extractor.match(generator).group(1)
return primary_generator
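A sketch of the version-dependent defaults above for a hypothetical package whose cmake build dependency is cmake@3.16 and whose ipo variant is off:

args = []
_conditional_cmake_defaults(pkg, args)  # pkg is illustrative
# args would gain defines equivalent to:
#   CMAKE_INTERPROCEDURAL_OPTIMIZATION=False   (cmake >= 3.9)
#   CMAKE_POLICY_DEFAULT_CMP0090=NEW           (cmake >= 3.15)
#   CMAKE_FIND_USE_PACKAGE_REGISTRY=False      (cmake >= 3.16)
# plus CMAKE_EXPORT_COMPILE_COMMANDS=True when the generator is make or ninja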
def generator(*names: str, default: Optional[str] = None):
@@ -165,13 +86,6 @@ class CMakePackage(spack.package_base.PackageBase):
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "cmake"

#: When this package depends on Python and ``find_python_hints`` is set to True, pass the
#: defines {Python3,Python,PYTHON}_EXECUTABLE explicitly, so that CMake locates the right
#: Python in its builtin FindPython3, FindPython, and FindPythonInterp modules. Spack does
#: CMake's job because CMake's modules by default only search for Python versions known at the
#: time of release.
find_python_hints = True

build_system("cmake")

with when("build_system=cmake"):
@@ -302,10 +216,7 @@ class CMakeBuilder(BaseBuilder):
@property
def archive_files(self):
"""Files to archive for packages based on CMake"""
files = [os.path.join(self.build_directory, "CMakeCache.txt")]
if _supports_compilation_databases(self):
files.append(os.path.join(self.build_directory, "compile_commands.json"))
return files
return [os.path.join(self.build_directory, "CMakeCache.txt")]

@property
def root_cmakelists_dir(self):
@@ -330,9 +241,9 @@ def std_cmake_args(self):
"""Standard cmake arguments provided as a property for
convenience of package writers
"""
args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
args += getattr(self.pkg, "cmake_flag_args", [])
return args
std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
return std_cmake_args

@staticmethod
def std_args(pkg, generator=None):
@@ -352,6 +263,11 @@ def std_args(pkg, generator=None):
except KeyError:
build_type = "RelWithDebInfo"

try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False

define = CMakeBuilder.define
args = [
"-G",
@@ -360,6 +276,10 @@ def std_args(pkg, generator=None):
define("CMAKE_BUILD_TYPE", build_type),
]

# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
if pkg.spec.satisfies("^cmake@3.9:"):
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

if primary_generator == "Unix Makefiles":
args.append(define("CMAKE_VERBOSE_MAKEFILE", True))

@@ -368,9 +288,6 @@ def std_args(pkg, generator=None):
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
)

_conditional_cmake_defaults(pkg, args)
_maybe_set_python_hints(pkg, args)

# Set up CMake rpath
args.extend(
[
@@ -218,7 +218,7 @@ def pset_components(self):
"+inspector": " intel-inspector",
"+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
# Trace Analyzer and Collector
"+vtune": " intel-vtune",
"+vtune": " intel-vtune"
# VTune, ..-profiler since 2020, ..-amplifier before
}.items():
if variant in self.spec:
@@ -29,12 +29,15 @@ class LuaPackage(spack.package_base.PackageBase):

with when("build_system=lua"):
depends_on("lua-lang")
with when("^[virtuals=lua-lang] lua"):
extends("lua")
with when("^[virtuals=lua-lang] lua-luajit"):
extends("lua-luajit+lualinks")
with when("^[virtuals=lua-lang] lua-luajit-openresty"):
extends("lua-luajit-openresty+lualinks")
extends("lua", when="^lua")
with when("^lua-luajit"):
extends("lua-luajit")
depends_on("luajit")
depends_on("lua-luajit+lualinks")
with when("^lua-luajit-openresty"):
extends("lua-luajit-openresty")
depends_on("luajit")
depends_on("lua-luajit-openresty+lualinks")

@property
def lua(self):
@@ -149,7 +149,7 @@ def std_args(pkg):
else:
default_library = "shared"

return [
args = [
"-Dprefix={0}".format(pkg.prefix),
# If we do not specify libdir explicitly, Meson chooses something
# like lib/x86_64-linux-gnu, which causes problems when trying to
@@ -163,6 +163,8 @@ def std_args(pkg):
"-Dwrap_mode=nodownload",
]

return args

@property
def build_dirname(self):
"""Returns the directory name to use when building the package."""
@@ -9,13 +9,10 @@
import shutil
from os.path import basename, isdir

from llnl.util import tty
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.filesystem import HeaderList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree

from spack.build_environment import dso_suffix
from spack.directives import conflicts, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -182,72 +179,16 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):

"""

def openmp_libs(self):
"""Supply LibraryList for linking OpenMP"""

# NB: Hunting down explicit library files may be the Spack way of
# doing things, but it is better to add the compiler defined option
# e.g. -fopenmp

# If other packages use openmp, then all the packages need to
# support the same ABI. Spack usually uses the same compiler
# for all the packages, but you can force it if necessary:
#
# e.g. spack install blaspp%oneapi@2024 ^intel-oneapi-mkl%oneapi@2024
#
if self.spec.satisfies("%intel") or self.spec.satisfies("%oneapi"):
libname = "libiomp5"
elif self.spec.satisfies("%gcc"):
libname = "libgomp"
elif self.spec.satisfies("%clang"):
libname = "libomp"
else:
raise InstallError(
"OneAPI package with OpenMP threading requires one of %clang, %gcc, %oneapi, "
"or %intel"
)

# query the compiler for the library path
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
"--print-file-name", f"{libname}.{dso_suffix}", output=str
).strip()

# Newer versions of clang do not give the full path to libomp. If that's
# the case, look in a path relative to the compiler where libomp is
# typically found. If it's not found there, error out.
if not os.path.exists(omp_lib_path) and self.spec.satisfies("%clang"):
compiler_root = os.path.dirname(os.path.dirname(os.path.realpath(self.compiler.cc)))
omp_lib_path_compiler = os.path.join(compiler_root, "lib", f"{libname}.{dso_suffix}")
if os.path.exists(omp_lib_path_compiler):
omp_lib_path = omp_lib_path_compiler

# if the compiler cannot find the file, it returns the input path
if not os.path.exists(omp_lib_path):
raise InstallError(f"OneAPI package cannot locate OpenMP library: {omp_lib_path}")

omp_libs = LibraryList(omp_lib_path)
tty.info(f"OneAPI package requires OpenMP library: {omp_libs}")
return omp_libs
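The lookup above leans on the common --print-file-name behavior of gcc/clang-style drivers: an absolute path when the library is found, the input name echoed back when it is not. A standalone sketch (assumes a gcc on PATH):

import subprocess

out = subprocess.run(
    ["gcc", "--print-file-name", "libgomp.so"],
    capture_output=True, text=True,
).stdout.strip()
# found     -> absolute path such as /usr/lib/gcc/x86_64-linux-gnu/12/libgomp.so
# not found -> "libgomp.so" echoed back, which is why the code above
#              double-checks os.path.exists(omp_lib_path)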
# find_headers uses heuristics to determine the include directory
# that do not work for oneapi packages. Use explicit directories
# instead.
def header_directories(self, dirs):
h = HeaderList([])
h.directories = dirs
# trilinos passes the directories to cmake, and cmake requires
# that the directory exists
for dir in dirs:
if not isdir(dir):
raise RuntimeError(f"{dir} does not exist")
return h

@property
def headers(self):
# This should match the directories added to CPATH by
# env/vars.sh for the component
return self.header_directories([self.component_prefix.include])
return self.header_directories(
[self.component_prefix.include, self.component_prefix.include.join(self.component_dir)]
)

@property
def libs(self):
@@ -6,8 +6,7 @@
import os
import re
import shutil
import stat
from typing import Dict, Iterable, List, Mapping, Optional, Tuple
from typing import Iterable, List, Mapping, Optional

import archspec
@@ -137,52 +136,31 @@ def view_file_conflicts(self, view, merge_map):
return conflicts

def add_files_to_view(self, view, merge_map, skip_if_exists=True):
# Patch up shebangs to the python linked in the view only if python is built by Spack.
if not self.extendee_spec or self.extendee_spec.external:
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)

# We only patch shebangs in the bin directory.
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map

bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
global_view = fs.same_path(python_prefix, view.get_projection_for_spec(self.spec))
for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst):
if os.path.exists(dst):
continue

if not fs.path_contains_subdirectory(src, bin_dir):
elif global_view or not fs.path_contains_subdirectory(src, bin_dir):
view.link(src, dst)
continue

s = os.lstat(src)

# Symlink is delayed because we may need to re-target if its target is copied in view
if stat.S_ISLNK(s.st_mode):
delayed_links.append((src, dst))
continue

# If it's executable and has a shebang, copy and patch it.
if (s.st_mode & 0b111) and fs.has_shebang(src):
copied_files[(s.st_dev, s.st_ino)] = dst
elif not os.path.islink(src):
shutil.copy2(src, dst)
fs.filter_file(
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
)
is_script = fs.is_nonsymlink_exe_with_shebang(src)
if is_script and not python_is_external:
fs.filter_file(
python_prefix,
os.path.abspath(view.get_projection_for_spec(self.spec)),
dst,
)
else:
view.link(src, dst)

# Finally re-target the symlinks that point to copied files.
for src, dst in delayed_links:
try:
s = os.stat(src)
target = copied_files[(s.st_dev, s.st_ino)]
except (OSError, KeyError):
target = None
if target:
os.symlink(target, dst)
else:
view.link(src, dst, spec=self.spec)
orig_link_target = os.path.realpath(src)
new_link_target = os.path.abspath(merge_map[orig_link_target])
view.link(new_link_target, dst)

def remove_files_from_view(self, view, merge_map):
ignore_namespace = False
@@ -35,9 +35,9 @@ def _misc_cache():


#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
)
MISC_CACHE: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)


def fetch_cache_location():
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_fetch_cache)
)
FETCH_CACHE: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)
@@ -7,7 +7,9 @@
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry if isinstance(needs_entry, str) else None
else needs_entry
if isinstance(needs_entry, str)
else None
)

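Behavior of the conditional expression above on the three input shapes it distinguishes (job names hypothetical):

assert get_job_name({"job": "build-x"}) == "build-x"  # mapping, artifacts defaults to True
assert get_job_name({"job": "build-x", "artifacts": False}) is None
assert get_job_name("build-y") == "build-y"           # plain string passes through
assert get_job_name(42) is None                       # anything else -> None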
@@ -7,14 +7,13 @@
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union
from typing import Dict, List, Optional, Tuple

import llnl.util.tty as tty
from llnl.string import plural
@@ -327,30 +326,8 @@ def _progress(i: int, total: int):
return ""

class NoPool:
def map(self, func, args):
return [func(a) for a in args]

def starmap(self, func, args):
return [func(*a) for a in args]

def __enter__(self):
return self

def __exit__(self, *args):
pass


MaybePool = Union[multiprocessing.pool.Pool, NoPool]


def _make_pool() -> MaybePool:
"""Can't use threading because it's unsafe, and can't use spawned processes because of globals.
That leaves only forking"""
if multiprocessing.get_start_method() == "fork":
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
else:
return NoPool()
def _make_pool():
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))

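NoPool mirrors just enough of multiprocessing.pool.Pool's interface (map, starmap, context management) for the call sites here, so _make_pool can degrade to serial in-process execution when fork is unavailable. A usage sketch:

def square(x):  # top-level function, so a real Pool can pickle it
    return x * x

with _make_pool() as pool:  # forking Pool when available, NoPool otherwise
    assert pool.map(square, [1, 2, 3]) == [1, 4, 9]
    assert pool.starmap(pow, [(2, 3), (3, 2)]) == [8, 9]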
def push_fn(args):
@@ -686,7 +663,7 @@ def _push_oci(
base_image: Optional[ImageReference],
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: MaybePool,
pool: multiprocessing.pool.Pool,
force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
"""Push specs to an OCI registry
@@ -802,10 +779,11 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
return config if "spec" in config else None


def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
def _update_index_oci(
image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
spack.oci.opener.ensure_status(response, 200)
tags = json.load(response)["tags"]

# Fetch all image config files in parallel
@@ -5,7 +5,6 @@

import re
import sys
from typing import Dict, Optional

import llnl.string
import llnl.util.lang
@@ -18,15 +17,10 @@
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import (
ManualDownloadRequiredError,
PackageBase,
deprecated_version,
preferred_version,
)
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.version import StandardVersion, Version
from spack.version import Version

description = "checksum available versions of a package"
section = "packaging"
@@ -90,30 +84,28 @@ def checksum(parser, args):
spec = spack.spec.Spec(args.package)

# Get the package we're going to generate checksums for
pkg: PackageBase = spack.repo.PATH.get_pkg_class(spec.name)(spec)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)

# Skip manually downloaded packages
if pkg.manual_download:
raise ManualDownloadRequiredError(pkg.download_instr)
versions = [Version(v) for v in args.versions]

versions = [StandardVersion.from_string(v) for v in args.versions]

# Define placeholder for remote versions. This'll help reduce redundant work if we need to
# check for the existence of remote versions more than once.
remote_versions: Optional[Dict[StandardVersion, str]] = None
# Define placeholder for remote versions.
# This'll help reduce redundant work if we need to check for the existence
# of remote versions more than once.
remote_versions = None

# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
if len(remote_versions) > 0:
versions.append(max(remote_versions.keys()))
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)

# Add preferred version if requested (todo: exclude git versions)
# Add preferred version if requested
if args.preferred:
versions.append(preferred_version(pkg))

# Store a dict of the form version -> URL
url_dict: Dict[StandardVersion, str] = {}
url_dict = {}

for version in versions:
if deprecated_version(pkg, version):
@@ -123,16 +115,16 @@ def checksum(parser, args):
if url is not None:
url_dict[version] = url
continue
# If we get here, it's because no valid url was provided by the package. Do expensive
# fallback to try to recover
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
if version in remote_versions:
url_dict[version] = remote_versions[version]

if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
url_dict = remote_versions

# A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.
@@ -6,7 +6,6 @@
import json
import os
import shutil
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -158,9 +157,7 @@ def setup_parser(subparser):
description=deindent(ci_reproduce.__doc__),
help=spack.cmd.first_line(ci_reproduce.__doc__),
)
reproduce.add_argument(
"job_url", help="URL of GitLab job web page or artifact", type=_gitlab_artifacts_url
)
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
reproduce.add_argument(
"--runtime",
help="Container runtime to use.",
@@ -795,6 +792,11 @@ def ci_reproduce(args):
artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
instructions for reproducing the build locally
"""
job_url = args.job_url
work_dir = args.working_dir
autostart = args.autostart
runtime = args.runtime

# Allow passing a GPG key for reproducing protected CI jobs
if args.gpg_file:
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
@@ -803,47 +805,7 @@ def ci_reproduce(args):
|
||||
else:
|
||||
gpg_key_url = None
|
||||
|
||||
return spack_ci.reproduce_ci_job(
|
||||
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
|
||||
)
|
||||
|
||||
|
||||
def _gitlab_artifacts_url(url: str) -> str:
|
||||
"""Take a URL either to the URL of the job in the GitLab UI, or to the artifacts zip file,
|
||||
and output the URL to the artifacts zip file."""
|
||||
parsed = urlparse(url)
|
||||
|
||||
if not parsed.scheme or not parsed.netloc:
|
||||
raise ValueError(url)
|
||||
|
||||
parts = parsed.path.split("/")
|
||||
|
||||
if len(parts) < 2:
|
||||
raise ValueError(url)
|
||||
|
||||
# Just use API endpoints verbatim, they're probably generated by Spack.
|
||||
if parts[1] == "api":
|
||||
return url
|
||||
|
||||
# If it's a URL to the job in the Gitlab UI, we may need to append the artifacts path.
|
||||
minus_idx = parts.index("-")
|
||||
|
||||
# Remove repeated slashes in the remainder
|
||||
rest = [p for p in parts[minus_idx + 1 :] if p]
|
||||
|
||||
# Now the format is jobs/X or jobs/X/artifacts/download
|
||||
if len(rest) < 2 or rest[0] != "jobs":
|
||||
raise ValueError(url)
|
||||
|
||||
if len(rest) == 2:
|
||||
# replace jobs/X with jobs/X/artifacts/download
|
||||
rest.extend(("artifacts", "download"))
|
||||
|
||||
# Replace the parts and unparse.
|
||||
parts[minus_idx + 1 :] = rest
|
||||
|
||||
# Don't allow fragments / queries
|
||||
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
|
||||
return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
|
||||
|
||||
|
||||
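To make the rewrite above concrete, here is a small runnable sketch of the same urlparse/urlunparse mechanics on a hypothetical job-page URL (hostname and project are made up):

    from urllib.parse import urlparse, urlunparse

    # Hypothetical GitLab job-page URL; the path after "-" is jobs/<id>
    url = "https://gitlab.example.com/group/project/-/jobs/123456"
    parsed = urlparse(url)
    parts = parsed.path.split("/") + ["artifacts", "download"]
    print(urlunparse(parsed._replace(path="/".join(parts), query="", fragment="")))
    # -> https://gitlab.example.com/group/project/-/jobs/123456/artifacts/download
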
def ci(parser, args):

@@ -115,7 +115,7 @@ def emulate_env_utility(cmd_name, context: Context, args):
f"Not all dependencies of {spec.name} are installed. "
f"Cannot setup {context} environment:",
spec.tree(
install_status=True,
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes

@@ -76,10 +76,6 @@ def setup_parser(subparser):
)
add_parser.add_argument("-f", "--file", help="file from which to set all config values")

change_parser = sp.add_parser("change", help="swap variants etc. on specs in config")
change_parser.add_argument("path", help="colon-separated path to config section with specs")
change_parser.add_argument("--match-spec", help="only change constraints that match this")

prefer_upstream_parser = sp.add_parser(
"prefer-upstream", help="set package preferences from upstream"
)
@@ -122,7 +118,7 @@ def _get_scope_and_section(args):
if not section and not scope:
env = ev.active_environment()
if env:
scope = env.scope_name
scope = env.env_file_config_scope_name()

# set scope defaults
elif not scope:
@@ -267,98 +263,6 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


def _config_change_requires_scope(path, spec, scope, match_spec=None):
"""Return whether or not anything changed."""
require = spack.config.get(path, scope=scope)
if not require:
return False

changed = False

def override_cfg_spec(spec_str):
nonlocal changed

init_spec = spack.spec.Spec(spec_str)
# Overridden spec cannot be anonymous
init_spec.name = spec.name
if match_spec and not init_spec.satisfies(match_spec):
# If there is a match_spec, don't change constraints that
# don't match it
return spec_str
elif not init_spec.intersects(spec):
changed = True
return str(spack.spec.Spec.override(init_spec, spec))
else:
# Don't override things if they intersect, otherwise we'd
# be e.g. attaching +debug to every single version spec
return spec_str

if isinstance(require, str):
new_require = override_cfg_spec(require)
else:
new_require = []
for item in require:
if "one_of" in item:
item["one_of"] = [override_cfg_spec(x) for x in item["one_of"]]
elif "any_of" in item:
item["any_of"] = [override_cfg_spec(x) for x in item["any_of"]]
elif "spec" in item:
item["spec"] = override_cfg_spec(item["spec"])
elif isinstance(item, str):
item = override_cfg_spec(item)
else:
raise ValueError(f"Unexpected requirement: ({type(item)}) {str(item)}")
new_require.append(item)

spack.config.set(path, new_require, scope=scope)
return changed


def _config_change(config_path, match_spec_str=None):
all_components = spack.config.process_config_path(config_path)
key_components = all_components[:-1]
key_path = ":".join(key_components)

spec = spack.spec.Spec(syaml.syaml_str(all_components[-1]))

match_spec = None
if match_spec_str:
match_spec = spack.spec.Spec(match_spec_str)

if key_components[-1] == "require":
# Extract the package name from the config path, which allows
# args.spec to be anonymous if desired
pkg_name = key_components[1]
spec.name = pkg_name

changed = False
for scope in spack.config.writable_scope_names():
changed |= _config_change_requires_scope(key_path, spec, scope, match_spec=match_spec)

if not changed:
existing_requirements = spack.config.get(key_path)
if isinstance(existing_requirements, str):
raise spack.config.ConfigError(
"'config change' needs to append a requirement,"
" but existing require: config is not a list"
)

ideal_scope_to_modify = None
for scope in spack.config.writable_scope_names():
if spack.config.get(key_path, scope=scope):
ideal_scope_to_modify = scope
break

update_path = f"{key_path}:[{str(spec)}]"
spack.config.add(update_path, scope=ideal_scope_to_modify)
else:
raise ValueError("'config change' can currently only change 'require' sections")


def config_change(args):
_config_change(args.path, args.match_spec)


def config_update(args):
# Read the configuration files
spack.config.CONFIG.get_config(args.section, scope=args.scope)
@@ -586,6 +490,5 @@ def config(parser, args):
"update": config_update,
"revert": config_revert,
"prefer-upstream": config_prefer_upstream,
"change": config_change,
}
action[args.config_command](args)

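The `one_of`/`any_of` walk above applies one override function uniformly to every requirement entry. A minimal runnable sketch of that traversal with a stand-in override (not Spack's Spec logic):

    def rewrite_requirements(require, override):
        # Mirror the shape of _config_change_requires_scope: a bare string,
        # or a list of strings / {one_of,any_of,spec} dicts.
        if isinstance(require, str):
            return override(require)
        rewritten = []
        for item in require:
            if "one_of" in item:
                item["one_of"] = [override(x) for x in item["one_of"]]
            elif "any_of" in item:
                item["any_of"] = [override(x) for x in item["any_of"]]
            elif "spec" in item:
                item["spec"] = override(item["spec"])
            elif isinstance(item, str):
                item = override(item)
            rewritten.append(item)
        return rewritten

    print(rewrite_requirements(["@3.0", {"any_of": ["+mpi", "~mpi"]}], lambda s: s + " %gcc"))
    # -> ['@3.0 %gcc', {'any_of': ['+mpi %gcc', '~mpi %gcc']}]
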
@@ -64,9 +64,8 @@ class {class_name}({base_class_name}):
# maintainers("github_user1", "github_user2")

# FIXME: Add the SPDX identifier of the project's license below.
# See https://spdx.org/licenses/ for a list. Upon manually verifying
# the license, set checked_by to your Github username.
license("UNKNOWN", checked_by="github_user1")
# See https://spdx.org/licenses/ for a list.
license("UNKNOWN")

{versions}


@@ -8,7 +8,6 @@
import llnl.util.tty as tty

import spack.cmd
import spack.config
import spack.spec
import spack.util.path
import spack.version
@@ -22,7 +21,6 @@

def setup_parser(subparser):
subparser.add_argument("-p", "--path", help="source location of package")
subparser.add_argument("-b", "--build-directory", help="build directory for the package")

clone_group = subparser.add_mutually_exclusive_group()
clone_group.add_argument(
@@ -153,11 +151,4 @@ def develop(parser, args):
env = spack.cmd.require_active_env(cmd_name="develop")
tty.debug("Updating develop config for {0} transactionally".format(env.name))
with env.write_transaction():
if args.build_directory is not None:
spack.config.add(
"packages:{}:package_attributes:build_directory:{}".format(
spec.name, args.build_directory
),
env.scope_name,
)
_update_config(spec, path)

@@ -54,104 +54,6 @@
]


#
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment"
)
view_opts.add_argument(
"--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
)
subparser.add_argument(
"envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)


def env_create(args):
if args.with_view:
# Expand relative paths provided on the command line to the current working directory
# This way we interpret `spack env create --with-view ./view --dir ./env` as
# a view in $PWD/view, not $PWD/env/view. This is different from specifying a relative
# path in the manifest, which is resolved relative to the manifest file's location.
with_view = os.path.abspath(args.with_view)
elif args.without_view:
with_view = False
else:
# Note that 'None' means unspecified, in which case the Environment
# object could choose to enable a view by default. False means that
# the environment should not include a view.
with_view = None

env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
)

# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()


def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.

Arguments:
name_or_path (str): name of the environment to create, or path to it
init_file (str or file): optional initialization file -- can be
a JSON lockfile (*.lock, *.json) or YAML manifest file
dir (bool): if True, create an environment in a directory instead
of a named environment
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env

env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env


#
# env activate
#
@@ -216,46 +118,22 @@ def env_activate_setup_parser(subparser):
help="decorate the command line prompt when activating",
)

subparser.add_argument(
env_options = subparser.add_mutually_exclusive_group()
env_options.add_argument(
"--temp",
action="store_true",
default=False,
help="create and activate an environment in a temporary directory",
)
subparser.add_argument(
"--create",
action="store_true",
default=False,
help="create and activate the environment if it doesn't exist",
env_options.add_argument(
"-d", "--dir", default=None, help="activate the environment in this directory"
)
subparser.add_argument(
"--envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
subparser.add_argument(
"-d",
"--dir",
default=False,
action="store_true",
help="activate environment based on the directory supplied",
)
subparser.add_argument(
env_options.add_argument(
metavar="env",
dest="env_name",
dest="activate_env",
nargs="?",
default=None,
help=(
"name of managed environment or directory of the anonymous env"
" (when using --dir/-d) to activate"
),
help="name of environment to activate",
)


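The parser refactor above leans on argparse's mutually exclusive groups: `--temp`, `--dir`, and the positional environment name can no longer be combined. A self-contained sketch of that behavior (it mirrors the shape of the diff, not Spack's actual parser):

    import argparse

    parser = argparse.ArgumentParser(prog="spack env activate")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--temp", action="store_true", default=False)
    group.add_argument("-d", "--dir", default=None)
    # a positional with nargs="?" is optional, so argparse allows it in the group
    group.add_argument("env_name", nargs="?", default=None)

    print(parser.parse_args(["myenv"]))      # env_name='myenv', temp=False, dir=None
    # parser.parse_args(["--temp", "myenv"]) # exits: the two arguments are exclusive
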
@@ -284,17 +162,11 @@ def env_activate(args):
if args.env or args.no_env or args.env_dir:
tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")

# special parser error handling relative to the --temp flag
temp_conflicts = iter([args.keep_relative, args.dir, args.env_name, args.with_view])
if args.temp and any(temp_conflicts):
tty.die(
"spack env activate --temp cannot be combined with managed environments, --with-view,"
" --keep-relative, or --dir."
)
env_name_or_dir = args.activate_env or args.dir

# When executing `spack env activate` without further arguments, activate
# the default environment. It's created when it doesn't exist yet.
if not args.env_name and not args.temp:
if not env_name_or_dir and not args.temp:
short_name = "default"
if not ev.exists(short_name):
ev.create(short_name)
@@ -313,25 +185,17 @@ def env_activate(args):
_tty_info(f"Created and activated temporary environment in {env_path}")

# Managed environment
elif ev.exists(args.env_name) and not args.dir:
env_path = ev.root(args.env_name)
short_name = args.env_name
elif ev.exists(env_name_or_dir) and not args.dir:
env_path = ev.root(env_name_or_dir)
short_name = env_name_or_dir

# Environment directory
elif ev.is_env_dir(args.env_name):
env_path = os.path.abspath(args.env_name)
elif ev.is_env_dir(env_name_or_dir):
env_path = os.path.abspath(env_name_or_dir)
short_name = os.path.basename(env_path)

# create if user requested, and then recall recursively
elif args.create:
tty.set_msg_enabled(False)
env_create(args)
tty.set_msg_enabled(True)
env_activate(args)
return

else:
tty.die("No such environment: '%s'" % args.env_name)
tty.die("No such environment: '%s'" % env_name_or_dir)

env_prompt = "[%s]" % short_name

@@ -426,6 +290,97 @@ def env_deactivate(args):
sys.stdout.write(cmds)


#
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument("create_env", metavar="env", help="name of environment to create")
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment"
)
view_opts.add_argument(
"--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
)
subparser.add_argument(
"envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)


def env_create(args):
if args.with_view:
# Expand relative paths provided on the command line to the current working directory
# This way we interpret `spack env create --with-view ./view --dir ./env` as
# a view in $PWD/view, not $PWD/env/view. This is different from specifying a relative
# path in the manifest, which is resolved relative to the manifest file's location.
with_view = os.path.abspath(args.with_view)
elif args.without_view:
with_view = False
else:
# Note that 'None' means unspecified, in which case the Environment
# object could choose to enable a view by default. False means that
# the environment should not include a view.
with_view = None

env = _env_create(
args.create_env,
init_file=args.envfile,
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
)

# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()


def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.

Arguments:
name_or_path (str): name of the environment to create, or path to it
init_file (str or file): optional initialization file -- can be
a JSON lockfile (*.lock, *.json) or YAML manifest file
dir (bool): if True, create an environment in a directory instead
of a named environment
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env

env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env


#
# env remove
#

@@ -18,14 +18,7 @@

def setup_parser(subparser):
setup_parser.parser = subparser
subparser.epilog = """
Outside of an environment, the command concretizes specs and graphs them, unless the
--installed option is given. In that case specs are matched from the current DB.

If an environment is active, specs are matched from the currently available concrete specs
in the lockfile.

"""
method = subparser.add_mutually_exclusive_group()
method.add_argument(
"-a", "--ascii", action="store_true", help="draw graph as ascii to stdout (default)"
@@ -48,40 +41,39 @@ def setup_parser(subparser):
)

subparser.add_argument(
"-i", "--installed", action="store_true", help="graph specs from the DB"
"-i",
"--installed",
action="store_true",
help="graph installed specs, or specs in the active env (implies --dot)",
)

arguments.add_common_arguments(subparser, ["deptype", "specs"])


def graph(parser, args):
env = ev.active_environment()
if args.installed and env:
tty.die("cannot use --installed with an active environment")
if args.installed and args.specs:
tty.die("cannot specify specs with --installed")

if args.color and not args.dot:
tty.die("the --color option can be used only with --dot")

if args.installed:
if not args.specs:
specs = spack.store.STORE.db.query()
args.dot = True
env = ev.active_environment()
if env:
specs = env.concrete_roots()
else:
result = []
for item in args.specs:
result.extend(spack.store.STORE.db.query(item))
specs = list(set(result))
elif env:
specs = env.concrete_roots()
if args.specs:
specs = env.all_matching_specs(*args.specs)
specs = spack.store.STORE.db.query()

else:
specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)

if not specs:
tty.die("no spec matching the query")
setup_parser.parser.print_help()
return 1

if args.static:
args.dot = True
static_graph_dot(specs, depflag=args.deptype)
return


@@ -30,7 +30,6 @@
@c{@min:max} version range (inclusive)
@c{@min:} version <min> or higher
@c{@:max} up to version <max> (inclusive)
@c{@=version} exact version

compilers:
@g{%compiler} build with <compiler>

@@ -290,11 +290,11 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
def _dump_log_on_error(e: spack.build_environment.InstallError):
e.print_context()
assert e.pkg, "Expected InstallError to include the associated package"
if not os.path.exists(e.pkg.log_path):
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
sys.stderr.write("Full build log:\n")
with open(e.pkg.log_path, errors="replace") as log:
with open(e.pkg.build_log_path, errors="replace") as log:
shutil.copyfileobj(log, sys.stderr)


@@ -292,11 +292,9 @@ def head(n, span_id, title, anchor=None):
out.write("<dd>\n")
out.write(
", ".join(
(
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
)
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
for d in deps
)
)

@@ -1,71 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import gzip
import io
import os
import shutil
import sys

import spack.cmd
import spack.spec
import spack.util.compression as compression
from spack.cmd.common import arguments
from spack.main import SpackCommandError

description = "print out logs for packages"
section = "basic"
level = "long"


def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["spec"])


def _dump_byte_stream_to_stdout(instream: io.BufferedIOBase) -> None:
# Reopen stdout in binary mode so we don't have to worry about encoding
outstream = os.fdopen(sys.stdout.fileno(), "wb", closefd=False)
shutil.copyfileobj(instream, outstream)


def _logs(cmdline_spec: spack.spec.Spec, concrete_spec: spack.spec.Spec):
if concrete_spec.installed:
log_path = concrete_spec.package.install_log_path
elif os.path.exists(concrete_spec.package.stage.path):
# TODO: `spack logs` currently cannot show the logs while a package is being built, as the
# combined log file is only written after the build is finished.
log_path = concrete_spec.package.log_path
else:
raise SpackCommandError(f"{cmdline_spec} is not installed or staged")

try:
stream = open(log_path, "rb")
except OSError as e:
if e.errno == errno.ENOENT:
raise SpackCommandError(f"No logs are available for {cmdline_spec}") from e
raise SpackCommandError(f"Error reading logs for {cmdline_spec}: {e}") from e

with stream as f:
ext = compression.extension_from_magic_numbers_by_stream(f, decompress=False)
if ext and ext != "gz":
raise SpackCommandError(f"Unsupported storage format for {log_path}: {ext}")

# If the log file is gzip compressed, wrap it with a decompressor
_dump_byte_stream_to_stdout(gzip.GzipFile(fileobj=f) if ext == "gz" else f)


def logs(parser, args):
specs = spack.cmd.parse_specs(args.spec)

if not specs:
raise SpackCommandError("You must supply a spec.")

if len(specs) != 1:
raise SpackCommandError("Too many specs. Supply only one.")

concrete_spec = spack.cmd.matching_spec_from_env(specs[0])

_logs(specs[0], concrete_spec)
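
The `spack logs` code above streams a possibly gzip-compressed log to binary stdout. The same magic-byte sniffing can be sketched with only the standard library (an illustration, not the implementation above, which detects compression via spack.util.compression):

    import gzip, os, shutil, sys

    def dump_possibly_gzipped_log(path: str) -> None:
        with open(path, "rb") as f:
            # gzip streams start with the magic bytes 0x1f 0x8b; peek() does
            # not advance the file position
            gzipped = f.peek(2)[:2] == b"\x1f\x8b"
            stream = gzip.GzipFile(fileobj=f) if gzipped else f
            # reopen stdout in binary mode so text encoding never interferes
            out = os.fdopen(sys.stdout.fileno(), "wb", closefd=False)
            shutil.copyfileobj(stream, out)
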
@@ -135,6 +135,8 @@ def _process_result(result, show, required_format, kwargs):

def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status

fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt
@@ -144,7 +146,7 @@ def solve(parser, args):
"format": fmt,
"hashlen": None if args.very_long else 7,
"show_types": args.types,
"install_status": args.install_status,
"status_fn": install_status_fn if args.install_status else None,
"hashes": args.long or args.very_long,
}


@@ -75,6 +75,8 @@ def setup_parser(subparser):


def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status

fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt
@@ -84,7 +86,7 @@ def spec(parser, args):
"format": fmt,
"hashlen": None if args.very_long else 7,
"show_types": args.types,
"install_status": args.install_status,
"status_fn": install_status_fn if args.install_status else None,
}

# use a read transaction if we are getting install status for every

@@ -514,10 +514,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
for items in config:
items = items["compiler"]

# We might use equality here.
if cspec and not spack.spec.parse_with_version_concrete(
items["spec"], compiler=True
).satisfies(cspec):
# NOTE: in principle this should be equality not satisfies, but config can still
# be written in old format gcc@10.1.0 instead of gcc@=10.1.0.
if cspec and not cspec.satisfies(items["spec"]):
continue

# If an arch spec is given, confirm that this compiler

@@ -7,7 +7,6 @@
import re
import subprocess
import sys
import tempfile
from typing import Dict, List, Set

import spack.compiler
@@ -16,7 +15,7 @@
import spack.util.executable
from spack.compiler import Compiler
from spack.error import SpackError
from spack.version import Version, VersionRange
from spack.version import Version

avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
@@ -293,15 +292,6 @@ def setup_custom_environment(self, pkg, env):
else:
env.set_path(env_var, int_env[env_var].split(os.pathsep))

# certain versions of ifx (2021.3.0:2023.1.0) do not play well with env:TMP
# that has a "." character in the path
# Work around by pointing tmp to the stage for the duration of the build
if self.fc and Version(self.fc_version(self.fc)).satisfies(
VersionRange("2021.3.0", "2023.1.0")
):
new_tmp = tempfile.mkdtemp(dir=pkg.stage.path)
env.set("TMP", new_tmp)

env.set("CC", self.cc)
env.set("CXX", self.cxx)
env.set("FC", self.fc)

@@ -826,6 +826,7 @@ def __init__(self, spec):


class InsufficientArchitectureInfoError(spack.error.SpackError):

"""Raised when details on architecture cannot be collected from the
system"""


@@ -63,11 +63,10 @@
from spack.util.cpus import cpus_available

#: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = {
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"view": spack.schema.view.schema,
"develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema,
"repos": spack.schema.repos.schema,
@@ -82,7 +81,7 @@

# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
_ALL_SCHEMAS: Dict[str, Any] = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

#: Path to the default configuration
@@ -639,6 +638,7 @@ def get(self, path: str, default: Optional[Any] = None, scope: Optional[str] = N

We use ``:`` as the separator, like YAML objects.
"""
# TODO: Currently only handles maps. Think about lists if needed.
parts = process_config_path(path)
section = parts.pop(0)

@@ -883,9 +883,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
has_existing_value = True
path = ""
override = False
value = components[-1]
if not isinstance(value, syaml.syaml_str):
value = syaml.load_config(value)
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@@ -907,7 +905,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:

# construct value from this point down
for component in reversed(components[idx + 1 : -1]):
value: Dict[str, str] = {component: value}  # type: ignore[no-redef]
value = {component: value}
break

if override:
@@ -918,7 +916,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:

# append values to lists
if isinstance(existing, list) and not isinstance(value, list):
value: List[str] = [value]  # type: ignore[no-redef]
value = [value]

# merge value into existing
new = merge_yaml(existing, value)
@@ -951,8 +949,7 @@ def scopes() -> Dict[str, ConfigScope]:

def writable_scopes() -> List[ConfigScope]:
"""
Return list of writable scopes. Higher-priority scopes come first in the
list.
Return list of writable scopes
"""
return list(
reversed(
@@ -1097,7 +1094,7 @@ def read_config_file(
data = syaml.load_config(f)

if data:
if schema is None:
if not schema:
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
validate(data, schema)
@@ -1339,141 +1336,56 @@ def they_are(t):
return copy.copy(source)


class ConfigPath:
quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
unquoted_string = "[^:'\"]+"
element = rf"(?:(?:{quoted_string})|(?:{unquoted_string}))"
next_key_pattern = rf"({element}[+-]?)(?:\:|$)"

@staticmethod
def _split_front(string, extract):
m = re.match(extract, string)
if not m:
return None, None
token = m.group(1)
return token, string[len(token) :]

@staticmethod
def _validate(path):
"""Example valid config paths:

x:y:z
x:"y":z
x:y+:z
x:y::z
x:y+::z
x:y:
x:y::
"""
first_key, path = ConfigPath._split_front(path, ConfigPath.next_key_pattern)
if not first_key:
raise ValueError(f"Config path does not start with a parse-able key: {path}")
path_elements = [first_key]
path_index = 1
while path:
separator, path = ConfigPath._split_front(path, r"(\:+)")
if not separator:
raise ValueError(f"Expected separator for {path}")

path_elements[path_index - 1] += separator
if not path:
break

element, remainder = ConfigPath._split_front(path, ConfigPath.next_key_pattern)
if not element:
# If we can't parse something as a key, then it must be a
# value (if it's valid).
try:
syaml.load_config(path)
except spack.util.spack_yaml.SpackYAMLError as e:
raise ValueError(
"Remainder of path is not a valid key"
f" and does not parse as a value {path}"
) from e
element = path
path = None  # The rest of the path was consumed into the value
else:
path = remainder

path_elements.append(element)
path_index += 1

return path_elements

@staticmethod
def process(path):
result = []
quote = "['\"]"
seen_override_in_path = False

path_elements = ConfigPath._validate(path)
last_element_idx = len(path_elements) - 1
for i, element in enumerate(path_elements):
override = False
append = False
prepend = False
quoted = False
if element.endswith("::") or (element.endswith(":") and i == last_element_idx):
if seen_override_in_path:
raise syaml.SpackYAMLError(
"Meaningless second override indicator `::' in path `{0}'".format(path), ""
)
override = True
seen_override_in_path = True
element = element.rstrip(":")

if element.endswith("+"):
prepend = True
elif element.endswith("-"):
append = True
element = element.rstrip("+-")

if re.match(f"^{quote}", element):
quoted = True
element = element.strip("'\"")

if any([append, prepend, override, quoted]):
element = syaml.syaml_str(element)
if append:
element.append = True
if prepend:
element.prepend = True
if override:
element.override = True

result.append(element)

return result


def process_config_path(path: str) -> List[str]:
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')

Colons will be treated as static strings if inside of quotes,
e.g. `this:is:a:path:'value:with:colon'` will yield:
Note: quoted value path components will be processed as a single value (escaping colons)
quoted path components outside of the value will be considered ill formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:

[this, is, a, path, value:with:colon]

The path may consist only of keys (e.g. for a `get`) or may end in a value.
Keys are always strings: if a user encloses a key in quotes, the quotes
should be removed. Values with quotes should be treated as strings,
but without quotes, may be parsed as a different yaml object (e.g.
'{}' is a dict, but '"{}"' is a string).

This function does not know whether the final element of the path is a
key or value, so:

* It must strip the quotes, in case it is a key (so we look for "key" and
not '"key"')
* It must indicate somehow that the quotes were stripped, in case it is a
value (so that we don't process '"{}"' as a YAML dict)

Therefore, all elements with quotes are stripped, and then also converted
to ``syaml_str`` (if treating the final element as a value, the caller
should not parse it in this case).
[this, is, a, path, value:with:colon]
"""
return ConfigPath.process(path)
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError(f"Illegal leading `:' in path `{path}'", "")
seen_override_in_path = False
while path:
front, sep, path = path.partition(":")
if (sep and not path) or path.startswith(":"):
if seen_override_in_path:
raise syaml.SpackYAMLError(
f"Meaningless second override indicator `::' in path `{path}'", ""
)
path = path.lstrip(":")
front = syaml.syaml_str(front)
front.override = True  # type: ignore[attr-defined]
seen_override_in_path = True

elif front.endswith("+"):
front = front.rstrip("+")
front = syaml.syaml_str(front)
front.prepend = True  # type: ignore[attr-defined]

elif front.endswith("-"):
front = front.rstrip("-")
front = syaml.syaml_str(front)
front.append = True  # type: ignore[attr-defined]

result.append(front)

quote = "['\"]"
not_quote = "[^'\"]"

if re.match(f"^{quote}", path):
m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break

return result


#

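Given the docstring above, the intended parses can be illustrated as follows (assuming a Spack checkout provides spack.config on PYTHONPATH; the expected outputs follow the docstring and are not independently verified):

    from spack.config import process_config_path

    print(process_config_path("x:y:z"))
    # -> ['x', 'y', 'z']
    print(process_config_path("this:is:a:path:'value:with:colon'"))
    # -> ['this', 'is', 'a', 'path', 'value:with:colon']  (quoted value kept whole)
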
@@ -71,7 +71,7 @@
"almalinux:9": {
"bootstrap": {
"template": "container/almalinux_9.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux9",
@@ -79,13 +79,13 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
}
},
"almalinux:8": {
"bootstrap": {
"template": "container/almalinux_8.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux8",
@@ -93,7 +93,7 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
}
},
"centos:stream": {

@@ -36,9 +36,6 @@
#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK

#: A flag with no dependency types set
NONE: DepFlag = 0

#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)


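The constants above are plain bit flags, so dependency-type sets combine and test with bitwise operators. A self-contained sketch (values mirror the one-bit-per-type convention; they are illustrative, not spack.deptypes itself):

    BUILD, LINK, RUN, TEST = 1, 2, 4, 8   # one bit per dependency type
    DEFAULT = BUILD | LINK                # default when none is specified
    NONE = 0                              # no dependency types set
    ALL_FLAGS = (BUILD, LINK, RUN, TEST)

    deptype = LINK | RUN
    assert deptype & RUN and not deptype & TEST
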
@@ -34,7 +34,7 @@ class OpenMpi(Package):
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
from typing import Any, Callable, List, Optional, Set, Tuple, Union

import llnl.util.lang
import llnl.util.tty.color
@@ -57,9 +57,6 @@ class OpenMpi(Package):
VersionLookupError,
)

if TYPE_CHECKING:
import spack.package_base

__all__ = [
"DirectiveError",
"DirectiveMeta",
@@ -352,7 +349,6 @@ def remove_directives(arg):
return _decorator


SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive


@@ -384,7 +380,7 @@ def version(
tag: Optional[str] = None,
branch: Optional[str] = None,
get_full_repo: Optional[bool] = None,
submodules: Union[SubmoduleCallback, Optional[bool]] = None,
submodules: Optional[bool] = None,
submodules_delete: Optional[bool] = None,
# other version control
svn: Optional[str] = None,

@@ -21,6 +21,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import dedupe
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink

@@ -82,30 +83,17 @@
lockfile_name = "spack.lock"


#: Name of the directory where environments store repos, logs, views, configs
#: Name of the directory where environments store repos, logs, views
env_subdir_name = ".spack-env"


def env_root_path() -> str:
def env_root_path():
"""Override default root path if the user specified it"""
return spack.util.path.canonicalize_path(
spack.config.get("config:environments_root", default=default_env_path)
)


def environment_name(path: Union[str, pathlib.Path]) -> str:
"""Human-readable representation of the environment.

This is the path for directory environments, and just the name
for managed environments.
"""
path_str = str(path)
if path_str.startswith(env_root_path()):
return os.path.basename(path_str)
else:
return path_str


def check_disallowed_env_config_mods(scopes):
for scope in scopes:
with spack.config.use_configuration(scope):
@@ -191,8 +179,9 @@ def validate_env_name(name):
def activate(env, use_env_repo=False):
"""Activate an environment.

To activate an environment, we add its manifest's configuration scope to the
existing Spack configuration, and we set active to the current environment.
To activate an environment, we add its configuration scope to the
existing Spack configuration, and we set active to the current
environment.

Arguments:
env (Environment): the environment to activate
@@ -209,7 +198,7 @@ def activate(env, use_env_repo=False):
# below.
install_tree_before = spack.config.get("config:install_tree")
upstreams_before = spack.config.get("upstreams")
env.manifest.prepare_config_scope()
prepare_config_scope(env)
install_tree_after = spack.config.get("config:install_tree")
upstreams_after = spack.config.get("upstreams")
if install_tree_before != install_tree_after or upstreams_before != upstreams_after:
@@ -237,7 +226,7 @@ def deactivate():
if hasattr(_active_environment, "store_token"):
spack.store.restore(_active_environment.store_token)
delattr(_active_environment, "store_token")
_active_environment.manifest.deactivate_config_scope()
deactivate_config_scope(_active_environment)

# use _repo so we only remove if a repo was actually constructed
if _active_environment._repo:
@@ -374,12 +363,12 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
to store the environment in a different directory, we have to rewrite
relative paths to absolute ones."""
with env:
dev_specs = spack.config.get("develop", default={}, scope=env.scope_name)
dev_specs = spack.config.get("develop", default={}, scope=env.env_file_config_scope_name())
if not dev_specs:
return
for name, entry in dev_specs.items():
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
dev_path = entry["path"]
expanded_path = os.path.normpath(os.path.join(init_file_dir, entry["path"]))

# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
@@ -389,7 +378,7 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):

dev_specs[name]["path"] = expanded_path

spack.config.set("develop", dev_specs, scope=env.scope_name)
spack.config.set("develop", dev_specs, scope=env.env_file_config_scope_name())

env._dev_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
@@ -610,33 +599,39 @@ def content_hash(self, specs):
return spack.util.hash.b32_hash(contents)

def get_projection_for_spec(self, spec):
"""Get projection for spec. This function does not require the view
to exist on the filesystem."""
return self._view(self.root).get_projection_for_spec(spec)
"""Get projection for spec relative to view root

def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
Getting the projection from the underlying root will get the temporary
projection. This gives the permanent projection relative to the root
symlink.
"""
Returns a view object for the *underlying* view directory. This means that the
self.root symlink is followed, and that the view has to exist on the filesystem
(unless ``new``). This function is useful when writing to the view.
view = self.view()
view_path = view.get_projection_for_spec(spec)
rel_path = os.path.relpath(view_path, self._current_root)
return os.path.join(self.root, rel_path)

def view(self, new=None):
"""
Generate the FilesystemView object for this ViewDescriptor

By default, this method returns a FilesystemView object rooted at the
current underlying root of this ViewDescriptor (self._current_root)

Raise if new is None and there is no current view

Arguments:
new: If a string, create a FilesystemView rooted at that path. Default None. This
should only be used to regenerate the view, and cannot be used to access specs.
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
"""
root = new if new else self._current_root
if not root:
# This can only be hit if we write a future bug
raise SpackEnvironmentViewError(
msg = (
"Attempting to get nonexistent view from environment. "
f"View root is at {self.root}"
"View root is at %s" % self.root
)
return self._view(root)

def _view(self, root: str) -> SimpleFilesystemView:
"""Returns a view object for a given root dir."""
raise SpackEnvironmentViewError(msg)
return SimpleFilesystemView(
root,
spack.store.STORE.layout,
@@ -662,26 +657,30 @@ def __contains__(self, spec):

return True

def specs_for_view(self, concrete_roots: List[Spec]) -> List[Spec]:
"""Flatten the DAGs of the concrete roots, keep only unique, selected, and installed specs
in topological order from root to leaf."""
if self.link == "all":
deptype = dt.LINK | dt.RUN
elif self.link == "run":
deptype = dt.RUN
def specs_for_view(self, concretized_root_specs):
"""
From the list of concretized user specs in the environment, flatten
the dags, and filter selected, installed specs, remove duplicates on dag hash.
"""
# With deps, requires traversal
if self.link == "all" or self.link == "run":
deptype = ("run") if self.link == "run" else ("link", "run")
specs = list(
traverse.traverse_nodes(
concretized_root_specs, deptype=deptype, key=traverse.by_dag_hash
)
)
else:
deptype = dt.NONE

specs = traverse.traverse_nodes(
concrete_roots, order="topo", deptype=deptype, key=traverse.by_dag_hash
)
specs = list(dedupe(concretized_root_specs, key=traverse.by_dag_hash))

# Filter selected, installed specs
with spack.store.STORE.db.read_transaction():
return [s for s in specs if s in self and s.installed]
specs = [s for s in specs if s in self and s.installed]

def regenerate(self, concrete_roots: List[Spec]) -> None:
specs = self.specs_for_view(concrete_roots)
return specs

def regenerate(self, concretized_root_specs):
specs = self.specs_for_view(concretized_root_specs)

# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
@@ -698,14 +697,14 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
old_root = self._current_root

if new_root == old_root:
tty.debug(f"View at {self.root} does not need regeneration.")
tty.debug("View at %s does not need regeneration." % self.root)
return

_error_on_nonempty_view_dir(new_root)

# construct view at new_root
if specs:
tty.msg(f"Updating view at {self.root}")
tty.msg("Updating view at {0}".format(self.root))

view = self.view(new=new_root)

@@ -715,7 +714,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
# Create a new view
try:
fs.mkdirp(new_root)
view.add_specs(*specs)
view.add_specs(*specs, with_dependencies=False)

# create symlink from tmp_symlink_name to new_root
if os.path.exists(tmp_symlink_name):
@@ -729,7 +728,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
try:
shutil.rmtree(new_root, ignore_errors=True)
os.unlink(tmp_symlink_name)
except OSError:
except (IOError, OSError):
pass

# Give an informative error message for the typical error case: two specs, same package
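
The regeneration logic above builds the new view in a fresh directory and swaps a symlink into place so readers never see a half-built view. The core pattern is a generic atomic link replacement (a sketch, not Spack's exact code):

    import os

    def atomic_update_symlink(target: str, link_name: str) -> None:
        tmp = link_name + ".tmp"
        if os.path.lexists(tmp):
            os.unlink(tmp)
        os.symlink(target, tmp)
        # rename() is atomic on POSIX, so readers never observe a missing link
        os.rename(tmp, link_name)
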
@@ -770,17 +769,6 @@ def _create_environment(path):
return Environment(path)


def env_subdir_path(manifest_dir: Union[str, pathlib.Path]) -> str:
"""Path to where the environment stores repos, logs, views, configs.

Args:
manifest_dir: directory containing the environment manifest file

Returns: directory the environment uses to manage its files
"""
return os.path.join(str(manifest_dir), env_subdir_name)


class Environment:
"""A Spack environment, which bundles together configuration and a list of specs."""

@@ -792,13 +780,12 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
manifest_dir: directory with the "spack.yaml" associated with the environment
"""
self.path = os.path.abspath(str(manifest_dir))
self.name = environment_name(self.path)
self.env_subdir_path = env_subdir_path(self.path)

self.txlock = lk.Lock(self._transaction_lock_path)

self._unify = None
self.new_specs: List[Spec] = []
self.new_installs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {}

#: Specs from "spack.yaml"
@@ -815,15 +802,9 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self._previous_active = None
self._dev_specs = None

# Load the manifest file contents into memory
self._load_manifest_file()

def _load_manifest_file(self):
"""Instantiate and load the manifest file contents into memory."""
with lk.ReadTransaction(self.txlock):
self.manifest = EnvironmentManifestFile(self.path)
with self.manifest.use_config():
self._read()
self.manifest = EnvironmentManifestFile(manifest_dir)
self._read()

@property
def unify(self):
@@ -841,10 +822,19 @@ def __reduce__(self):
def _re_read(self):
"""Reinitialize the environment object."""
self.clear(re_read=True)
self._load_manifest_file()
self.manifest = EnvironmentManifestFile(self.path)
self._read(re_read=True)

def _read(self):
self._construct_state_from_manifest()
def _read(self, re_read=False):
# If the manifest has included files, then some of the information
# (e.g., definitions) MAY be in those files. So we need to ensure
# the config is populated with any associated spec lists in order
# to fully construct the manifest state.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
if includes and not re_read:
prepare_config_scope(self)

self._construct_state_from_manifest(re_read)

if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
@@ -871,67 +861,38 @@ def _process_definition(self, item):
else:
self.spec_lists[name] = user_specs

def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.

A boolean environment view option takes precedence over any that may
be included. So ``view: True`` results in the default view only. And
``view: False`` means the environment will have no view.

Args:
env_view: view option provided in the manifest or configuration
"""

def add_view(name, values):
"""Add the view with the name and the string or dict values."""
if isinstance(values, str):
self.views[name] = ViewDescriptor(self.path, values)
elif isinstance(values, dict):
self.views[name] = ViewDescriptor.from_dict(self.path, values)
else:
tty.error(f"Cannot add view named {name} for {type(values)} values {values}")

# If the configuration specifies 'view: False' then we are done
# processing views. If this is called with the environment's own
# view (versus an included view), then there are to be NO views.
if env_view is False:
return

# If the configuration specifies 'view: True' then only the default
# view will be created for the environment and we are done processing
# views.
if env_view is True:
add_view(default_view_name, self.view_path_default)
return

# Otherwise, the configuration has a subdirectory or dictionary.
if isinstance(env_view, str):
add_view(default_view_name, env_view)
elif env_view:
for name, values in env_view.items():
add_view(name, values)

# If we reach this point without an explicit view option then we
# provide the default view.
if self.views == dict():
self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)

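For reference, the shapes of the manifest's `view:` option that `_process_view` accepts, written as Python literals (descriptor keys such as `root` and `link` are taken from the surrounding diff; the exact values are illustrative):

    view_examples = [
        True,                                   # default view at the default path
        False,                                  # no view at all
        ".spack-env/view",                      # default-named view at a custom path
        {"combined": {"root": "view", "link": "run"}},  # dict of named descriptors
    ]
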
def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
def _construct_state_from_manifest(self, re_read=False):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
self.views = {}

for item in spack.config.get("definitions", []):
self._process_definition(item)
if not re_read:
for item in spack.config.get("definitions", []):
self._process_definition(item)

env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
self._process_definition(item)

spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
)
self.spec_lists[user_speclist_name] = user_specs

self._process_view(spack.config.get("view", True))
enable_view = env_configuration.get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, str):
self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
self.views = dict(
(name, ViewDescriptor.from_dict(path, values))
for name, values in enable_view.items()
)
else:
self.views = {}

@property
def user_specs(self):
@@ -960,8 +921,10 @@ def clear(self, re_read=False):
"""Clear the contents of the environment

Arguments:
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
and needs to be maintained when re-reading an existing environment.
re_read (bool): If True, do not clear ``new_specs`` nor
``new_installs`` values. These values cannot be read from
yaml, and need to be maintained when re-reading an existing
environment.
"""
self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()
@@ -975,6 +938,24 @@ def clear(self, re_read=False):
if not re_read:
# things that cannot be recreated from file
self.new_specs = []  # write packages for these on write()
self.new_installs = []  # write modules for these on write()

@property
def internal(self):
"""Whether this environment is managed by Spack."""
return self.path.startswith(env_root_path())

@property
def name(self):
"""Human-readable representation of the environment.

This is the path for directory environments, and just the name
for managed environments.
"""
if self.internal:
return os.path.basename(self.path)
else:
return self.path

@property
def active(self):
@@ -1003,9 +984,23 @@ def _lock_backup_v1_path(self):
"""Path to backup of v1 lockfile before conversion to v2"""
return self.lock_path + ".backup.v1"

@property
def env_subdir_path(self):
"""Path to directory where the env stores repos, logs, views."""
return os.path.join(self.path, env_subdir_name)

@property
def repos_path(self):
return os.path.join(self.env_subdir_path, "repos")
return os.path.join(self.path, env_subdir_name, "repos")

@property
def log_path(self):
return os.path.join(self.path, env_subdir_name, "logs")

@property
def config_stage_dir(self):
"""Directory for any staged configuration file(s)."""
return os.path.join(self.env_subdir_path, "config")

@property
def view_path_default(self):
@@ -1018,10 +1013,122 @@ def repo(self):
self._repo = make_repo_path(self.repos_path)
return self._repo

@property
def scope_name(self):
def included_config_scopes(self):
"""List of included configuration scopes from the environment.

Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scopes will take
precedence over later ones.

This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
"""
scopes = []

# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
|
||||
config_path = substitute_path_variables(config_path)
|
||||
|
||||
include_url = urllib.parse.urlparse(config_path)
|
||||
|
||||
# Transform file:// URLs to direct includes.
|
||||
if include_url.scheme == "file":
|
||||
config_path = urllib.request.url2pathname(include_url.path)
|
||||
|
||||
# Any other URL should be fetched.
|
||||
elif include_url.scheme in ("http", "https", "ftp"):
|
||||
# Stage any remote configuration file(s)
|
||||
staged_configs = (
|
||||
os.listdir(self.config_stage_dir)
|
||||
if os.path.exists(self.config_stage_dir)
|
||||
else []
|
||||
)
|
||||
remote_path = urllib.request.url2pathname(include_url.path)
|
||||
basename = os.path.basename(remote_path)
|
||||
if basename in staged_configs:
|
||||
# Do NOT re-stage configuration files over existing
|
||||
# ones with the same name since there is a risk of
|
||||
# losing changes (e.g., from 'spack config update').
|
||||
tty.warn(
|
||||
"Will not re-stage configuration from {0} to avoid "
|
||||
"losing changes to the already staged file of the "
|
||||
"same name.".format(remote_path)
|
||||
)
|
||||
|
||||
# Recognize the configuration stage directory
|
||||
# is flattened to ensure a single copy of each
|
||||
# configuration file.
|
||||
config_path = self.config_stage_dir
|
||||
if basename.endswith(".yaml"):
|
||||
config_path = os.path.join(config_path, basename)
|
||||
else:
|
||||
staged_path = spack.config.fetch_remote_configs(
|
||||
config_path, self.config_stage_dir, skip_existing=True
|
||||
)
|
||||
if not staged_path:
|
||||
raise SpackEnvironmentError(
|
||||
"Unable to fetch remote configuration {0}".format(config_path)
|
||||
)
|
||||
config_path = staged_path
|
||||
|
||||
elif include_url.scheme:
|
||||
raise ValueError(
|
||||
f"Unsupported URL scheme ({include_url.scheme}) for "
|
||||
f"environment include: {config_path}"
|
||||
)
|
||||
|
||||
# treat relative paths as relative to the environment
|
||||
if not os.path.isabs(config_path):
|
||||
config_path = os.path.join(self.path, config_path)
|
||||
config_path = os.path.normpath(os.path.realpath(config_path))
|
||||
|
||||
if os.path.isdir(config_path):
|
||||
# directories are treated as regular ConfigScopes
|
||||
config_name = "env:%s:%s" % (self.name, os.path.basename(config_path))
|
||||
tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
|
||||
scope = spack.config.ConfigScope(config_name, config_path)
|
||||
elif os.path.exists(config_path):
|
||||
# files are assumed to be SingleFileScopes
|
||||
config_name = "env:%s:%s" % (self.name, config_path)
|
||||
tty.debug(
|
||||
"Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
|
||||
)
|
||||
scope = spack.config.SingleFileScope(
|
||||
config_name, config_path, spack.schema.merged.schema
|
||||
)
|
||||
else:
|
||||
missing.append(config_path)
|
||||
continue
|
||||
|
||||
scopes.append(scope)
|
||||
|
||||
if missing:
|
||||
msg = "Detected {0} missing include path(s):".format(len(missing))
|
||||
msg += "\n {0}".format("\n ".join(missing))
|
||||
raise spack.config.ConfigFileError(msg)
|
||||
|
||||
return scopes
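
Editor's note: a small sketch of the precedence rule stated in the docstring above — includes are written highest-to-lowest in the YAML, and pushed onto the scope stack in reverse so the highest-precedence scope lands on top. The `lookup` helper is hypothetical.

```python
# Includes as written in spack.yaml: highest precedence first.
includes = ["site.yaml", "defaults.yaml"]

# Push in reverse so the highest-precedence scope ends up on top.
stack = []
for path in reversed(includes):
    stack.append(path)

def lookup(stack):
    # Later (higher) scopes shadow earlier ones.
    return stack[-1]

assert lookup(stack) == "site.yaml"
```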

    def env_file_config_scope_name(self):
        """Name of the config scope of this environment's manifest file."""
        return self.manifest.scope_name
        return "env:%s" % self.name

    def env_file_config_scope(self):
        """Get the configuration scope for the environment's manifest file."""
        config_name = self.env_file_config_scope_name()
        return spack.config.SingleFileScope(
            config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
        )

    def config_scopes(self):
        """A list of all configuration scopes for this environment."""
        return check_disallowed_env_config_mods(
            self.included_config_scopes() + [self.env_file_config_scope()]
        )

    def destroy(self):
        """Remove this environment from Spack entirely."""
@@ -1121,7 +1228,7 @@ def change_existing_spec(

        for idx, spec in matches:
            override_spec = Spec.override(spec, change_spec)
            self.spec_lists[list_name].replace(idx, str(override_spec))
            self.spec_lists[list_name].specs[idx] = override_spec
            if list_name == user_speclist_name:
                self.manifest.override_user_spec(str(override_spec), idx=idx)
            else:
@@ -1129,6 +1236,7 @@ def change_existing_spec(
                    str(spec), override=str(override_spec), list_name=list_name
                )
        self.update_stale_references(from_list=list_name)
        self._construct_state_from_manifest()

    def remove(self, query_spec, list_name=user_speclist_name, force=False):
        """Remove specs from an environment that match a query_spec"""
@@ -1699,8 +1807,8 @@ def _add_concrete_spec(self, spec, concrete, new=True):
        self.concretized_order.append(h)
        self.specs_by_hash[h] = concrete

    def _dev_specs_that_need_overwrite(self):
        """Return the hashes of all specs that need to be reinstalled due to source code change."""
    def _get_overwrite_specs(self):
        # Find all dev specs that were modified.
        changed_dev_specs = [
            s
            for s in traverse.traverse_nodes(
@@ -1725,6 +1833,21 @@ def _dev_specs_that_need_overwrite(self):
            if depth == 0 or spec.installed
        ]

    def _install_log_links(self, spec):
        if not spec.external:
            # Make sure log directory exists
            log_path = self.log_path
            fs.mkdirp(log_path)

            with fs.working_dir(self.path):
                # Link the resulting log file into logs dir
                build_log_link = os.path.join(
                    log_path, "%s-%s.log" % (spec.name, spec.dag_hash(7))
                )
                if os.path.lexists(build_log_link):
                    os.remove(build_log_link)
                symlink(spec.package.build_log_path, build_log_link)

    def _partition_roots_by_install_status(self):
        """Partition root specs into those that do not have to be passed to the
        installer, and those that should be, taking into account development
@@ -1758,18 +1881,58 @@ def install_all(self, **install_args):
        """
        self.install_specs(None, **install_args)

    def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
        roots = self.concrete_roots()
        specs = specs if specs is not None else roots
    def install_specs(self, specs=None, **install_args):
        tty.debug("Assessing installation status of environment packages")
        # If "spack install" is invoked repeatedly for a large environment
        # where all specs are already installed, the operation can take
        # a large amount of time due to repeatedly acquiring and releasing
        # locks. As a small optimization, drop already installed root specs.
        installed_roots, uninstalled_roots = self._partition_roots_by_install_status()
        if specs:
            specs_to_install = [s for s in specs if s not in installed_roots]
            specs_dropped = [s for s in specs if s in installed_roots]
        else:
            specs_to_install = uninstalled_roots
            specs_dropped = installed_roots

        # Extend the set of specs to overwrite with modified dev specs and their parents
        install_args["overwrite"] = (
            install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
        )
        # We need to repeat the work of the installer thanks to the above optimization:
        # Already installed root specs should be marked explicitly installed in the
        # database.
        if specs_dropped:
            with spack.store.STORE.db.write_transaction():  # do all in one transaction
                for spec in specs_dropped:
                    spack.store.STORE.db.update_explicit(spec, True)

        installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
        if not specs_to_install:
            tty.msg("All of the packages are already installed")
        else:
            tty.debug("Processing {0} uninstalled specs".format(len(specs_to_install)))

        PackageInstaller(installs).install()
        specs_to_overwrite = self._get_overwrite_specs()
        tty.debug("{0} specs need to be overwritten".format(len(specs_to_overwrite)))

        install_args["overwrite"] = install_args.get("overwrite", []) + specs_to_overwrite

        installs = []
        for spec in specs_to_install:
            pkg_install_args = install_args.copy()
            pkg_install_args["explicit"] = spec in self.roots()
            installs.append((spec.package, pkg_install_args))

        try:
            builder = PackageInstaller(installs)
            builder.install()
        finally:
            # Ensure links are set appropriately
            for spec in specs_to_install:
                if spec.installed:
                    self.new_installs.append(spec)
                    try:
                        self._install_log_links(spec)
                    except OSError as e:
                        tty.warn(
                            "Could not install log links for {0}: {1}".format(spec.name, str(e))
                        )
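
Editor's note: the optimization both versions of `install_specs` describe boils down to a partition step over the concrete roots. A hedged sketch with a stand-in `installed` predicate instead of the Spack database:

```python
from typing import Callable, List, Tuple

def partition_roots(
    roots: List[str], installed: Callable[[str], bool]
) -> Tuple[List[str], List[str]]:
    """Split roots into (already installed, still to install)."""
    done = [s for s in roots if installed(s)]
    todo = [s for s in roots if not installed(s)]
    return done, todo

done, todo = partition_roots(["zlib", "cmake"], lambda s: s == "zlib")
assert done == ["zlib"] and todo == ["cmake"]
# 'done' roots skip the installer entirely, but still need to be marked
# explicitly installed in the database, as the diff notes.
```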

    def all_specs_generator(self) -> Iterable[Spec]:
        """Returns a generator for all concrete specs"""
@@ -2090,8 +2253,13 @@ def write(self, regenerate: bool = True) -> None:

        if regenerate:
            self.regenerate_views()
            spack.hooks.post_env_write(self)

        self.new_specs.clear()
        self._reset_new_specs_and_installs()

    def _reset_new_specs_and_installs(self) -> None:
        self.new_specs = []
        self.new_installs = []

    def update_lockfile(self) -> None:
        with fs.write_tmp_and_move(self.lock_path) as f:
@@ -2212,7 +2380,7 @@ def _tree_to_display(spec):
    return spec.tree(
        recurse_dependencies=True,
        format=spack.spec.DISPLAY_FORMAT,
        install_status=True,
        status_fn=spack.spec.Spec.install_status,
        hashlen=7,
        hashes=True,
    )
@@ -2289,6 +2457,18 @@ def make_repo_path(root):
    return path


def prepare_config_scope(env):
    """Add env's scope to the global configuration search path."""
    for scope in env.config_scopes():
        spack.config.CONFIG.push_scope(scope)


def deactivate_config_scope(env):
    """Remove any scopes from env from the global config path."""
    for scope in env.config_scopes():
        spack.config.CONFIG.remove_scope(scope.name)


def manifest_file(env_name_or_dir):
    """Return the absolute path to a manifest file given the environment
    name or directory.
@@ -2467,9 +2647,8 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
        already existing in the directory.

        Args:
            manifest_dir: directory containing the manifest and lockfile
            manifest_dir: directory where the lockfile is
        """
        # TBD: Should this be the abspath?
        manifest_dir = pathlib.Path(manifest_dir)
        lockfile = manifest_dir / lockfile_name
        with lockfile.open("r") as f:
@@ -2487,8 +2666,6 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
    def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
        self.manifest_dir = pathlib.Path(manifest_dir)
        self.manifest_file = self.manifest_dir / manifest_name
        self.scope_name = f"env:{environment_name(self.manifest_dir)}"
        self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

        if not self.manifest_file.exists():
            msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
@@ -2725,145 +2902,6 @@ def __iter__(self):
    def __str__(self):
        return str(self.manifest_file)

    @property
    def included_config_scopes(self) -> List[spack.config.ConfigScope]:
        """List of included configuration scopes from the manifest.

        Scopes are listed in the YAML file in order from highest to
        lowest precedence, so configuration from earlier scope will take
        precedence over later ones.

        This routine returns them in the order they should be pushed onto
        the internal scope stack (so, in reverse, from lowest to highest).

        Returns: Configuration scopes associated with the environment manifest

        Raises:
            SpackEnvironmentError: if the manifest includes a remote file but
                no configuration stage directory has been identified
        """
        scopes = []

        # load config scopes added via 'include:', in reverse so that
        # highest-precedence scopes are last.
        includes = self[TOP_LEVEL_KEY].get("include", [])
        env_name = environment_name(self.manifest_dir)
        missing = []
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
            config_path = substitute_path_variables(config_path)

            include_url = urllib.parse.urlparse(config_path)

            # Transform file:// URLs to direct includes.
            if include_url.scheme == "file":
                config_path = urllib.request.url2pathname(include_url.path)

            # Any other URL should be fetched.
            elif include_url.scheme in ("http", "https", "ftp"):
                # Stage any remote configuration file(s)
                staged_configs = (
                    os.listdir(self.config_stage_dir)
                    if os.path.exists(self.config_stage_dir)
                    else []
                )
                remote_path = urllib.request.url2pathname(include_url.path)
                basename = os.path.basename(remote_path)
                if basename in staged_configs:
                    # Do NOT re-stage configuration files over existing
                    # ones with the same name since there is a risk of
                    # losing changes (e.g., from 'spack config update').
                    tty.warn(
                        "Will not re-stage configuration from {0} to avoid "
                        "losing changes to the already staged file of the "
                        "same name.".format(remote_path)
                    )

                    # Recognize the configuration stage directory
                    # is flattened to ensure a single copy of each
                    # configuration file.
                    config_path = self.config_stage_dir
                    if basename.endswith(".yaml"):
                        config_path = os.path.join(config_path, basename)
                else:
                    staged_path = spack.config.fetch_remote_configs(
                        config_path, str(self.config_stage_dir), skip_existing=True
                    )
                    if not staged_path:
                        raise SpackEnvironmentError(
                            "Unable to fetch remote configuration {0}".format(config_path)
                        )
                    config_path = staged_path

            elif include_url.scheme:
                raise ValueError(
                    f"Unsupported URL scheme ({include_url.scheme}) for "
                    f"environment include: {config_path}"
                )

            # treat relative paths as relative to the environment
            if not os.path.isabs(config_path):
                config_path = os.path.join(self.manifest_dir, config_path)
                config_path = os.path.normpath(os.path.realpath(config_path))

            if os.path.isdir(config_path):
                # directories are treated as regular ConfigScopes
                config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
                tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
                scope = spack.config.ConfigScope(config_name, config_path)
            elif os.path.exists(config_path):
                # files are assumed to be SingleFileScopes
                config_name = "env:%s:%s" % (env_name, config_path)
                tty.debug(
                    "Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
                )
                scope = spack.config.SingleFileScope(
                    config_name, config_path, spack.schema.merged.schema
                )
            else:
                missing.append(config_path)
                continue

            scopes.append(scope)

        if missing:
            msg = "Detected {0} missing include path(s):".format(len(missing))
            msg += "\n {0}".format("\n ".join(missing))
            raise spack.config.ConfigFileError(msg)

        return scopes

    @property
    def env_config_scopes(self) -> List[spack.config.ConfigScope]:
        """A list of all configuration scopes for the environment manifest.

        Returns: All configuration scopes associated with the environment
        """
        config_name = self.scope_name
        env_scope = spack.config.SingleFileScope(
            config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
        )

        return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])

    def prepare_config_scope(self) -> None:
        """Add the manifest's scopes to the global configuration search path."""
        for scope in self.env_config_scopes:
            spack.config.CONFIG.push_scope(scope)

    def deactivate_config_scope(self) -> None:
        """Remove any of the manifest's scopes from the global config path."""
        for scope in self.env_config_scopes:
            spack.config.CONFIG.remove_scope(scope.name)

    @contextlib.contextmanager
    def use_config(self):
        """Ensure only the manifest's configuration scopes are global."""
        with no_active_environment():
            self.prepare_config_scope()
            yield
            self.deactivate_config_scope()
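
Editor's note: `use_config` pairs a push with a pop around the `yield`. A minimal sketch of that discipline with a toy scope stack — `Config` and `use_scopes` are illustrative stand-ins, not Spack's classes:

```python
import contextlib

class Config:
    def __init__(self):
        self.scopes = []
    def push_scope(self, scope):
        self.scopes.append(scope)
    def remove_scope(self, name):
        self.scopes = [s for s in self.scopes if s != name]

CONFIG = Config()

@contextlib.contextmanager
def use_scopes(scopes):
    # Push on entry, pop on exit, mirroring prepare/deactivate above.
    for scope in scopes:
        CONFIG.push_scope(scope)
    yield
    for scope in scopes:
        CONFIG.remove_scope(scope)

with use_scopes(["env:demo"]):
    assert "env:demo" in CONFIG.scopes
assert "env:demo" not in CONFIG.scopes
```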


class SpackEnvironmentError(spack.error.SpackError):
    """Superclass for all errors to do with Spack environments."""

@@ -30,7 +30,6 @@
import shutil
import urllib.error
import urllib.parse
from pathlib import PurePath
from typing import List, Optional

import llnl.url
@@ -38,14 +37,13 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import comma_and, quote
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, working_dir
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
from llnl.util.symlink import symlink

import spack.config
import spack.error
import spack.oci.opener
import spack.url
import spack.util.archive
import spack.util.crypto as crypto
import spack.util.git
import spack.util.url as url_util
@@ -602,21 +600,29 @@ def expand(self):
        tty.debug("Source fetched with %s is already expanded." % self.url_attr)

    @_needs_stage
    def archive(self, destination, *, exclude: Optional[str] = None):
    def archive(self, destination, **kwargs):
        assert llnl.url.extension_from_path(destination) == "tar.gz"
        assert self.stage.source_path.startswith(self.stage.path)
        # We need to prepend this dir name to every entry of the tarfile
        top_level_dir = PurePath(self.stage.srcdir or os.path.basename(self.stage.source_path))

        with working_dir(self.stage.source_path), spack.util.archive.gzip_compressed_tarfile(
            destination
        ) as (tar, _, _):
            spack.util.archive.reproducible_tarfile_from_prefix(
                tar=tar,
                prefix=".",
                skip=lambda entry: entry.name == exclude,
                path_to_name=lambda path: (top_level_dir / PurePath(path)).as_posix(),
            )
        tar = which("tar", required=True)

        patterns = kwargs.get("exclude", None)
        if patterns is not None:
            if isinstance(patterns, str):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg("--exclude=%s" % p)

        with working_dir(self.stage.path):
            if self.stage.srcdir:
                # Here we create an archive with the default repository name.
                # The 'tar' command has options for changing the name of a
                # directory that is included in the archive, but they differ
                # based on OS, so we temporarily rename the repo
                with temp_rename(self.stage.source_path, self.stage.srcdir):
                    tar("-czf", destination, self.stage.srcdir)
            else:
                tar("-czf", destination, os.path.basename(self.stage.source_path))

    def __str__(self):
        return "VCS: %s" % self.url
@@ -697,6 +703,7 @@ def __str__(self):

@fetcher
class GitFetchStrategy(VCSFetchStrategy):

    """
    Fetch strategy that gets source code from a git repository.
    Use like this in a package:
@@ -929,12 +936,9 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
        git_commands = []
        submodules = self.submodules
        if callable(submodules):
            submodules = submodules(self.package)
        if submodules:
            if isinstance(submodules, str):
                submodules = [submodules]
            git_commands.append(["submodule", "init", "--"] + submodules)
            git_commands.append(["submodule", "update", "--recursive"])
            submodules = list(submodules(self.package))
            git_commands.append(["submodule", "init", "--"] + submodules)
            git_commands.append(["submodule", "update", "--recursive"])
        elif submodules:
            git_commands.append(["submodule", "update", "--init", "--recursive"])

@@ -1091,6 +1095,7 @@ def __str__(self):

@fetcher
class SvnFetchStrategy(VCSFetchStrategy):

    """Fetch strategy that gets source code from a subversion repository.
    Use like this in a package:

@@ -1185,6 +1190,7 @@ def __str__(self):

@fetcher
class HgFetchStrategy(VCSFetchStrategy):

    """
    Fetch strategy that gets source code from a Mercurial repository.
    Use like this in a package:

@@ -32,7 +32,6 @@
from llnl.util.tty.color import colorize

import spack.config
import spack.paths
import spack.projections
import spack.relocate
import spack.schema.projections
@@ -92,16 +91,16 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
        prefix_to_projection[spack.store.STORE.layout.root] = view._root

        # This is vestigial code for the *old* location of sbang.
        prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
            sbang.sbang_shebang_line()
        )
        prefix_to_projection[
            "#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
        ] = sbang.sbang_shebang_line()

        spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)

    try:
        os.chown(dst, src_stat.st_uid, src_stat.st_gid)
    except OSError:
        tty.debug(f"Can't change the permissions for {dst}")
        tty.debug("Can't change the permissions for %s" % dst)


def view_func_parser(parsed_name):
@@ -113,7 +112,7 @@ def view_func_parser(parsed_name):
    elif parsed_name in ("add", "symlink", "soft"):
        return view_symlink
    else:
        raise ValueError(f"invalid link type for view: '{parsed_name}'")
        raise ValueError("invalid link type for view: '%s'" % parsed_name)


def inverse_view_func_parser(view_type):
@@ -271,10 +270,9 @@ def __init__(self, root, layout, **kwargs):
        # Ensure projections are the same from each source
        # Read projections file from view
        if self.projections != self.read_projections():
            raise ConflictingProjectionsError(
                f"View at {self._root} has projections file"
                " which does not match projections passed manually."
            )
            msg = "View at %s has projections file" % self._root
            msg += " which does not match projections passed manually."
            raise ConflictingProjectionsError(msg)

        self._croot = colorize_root(self._root) + " "

@@ -315,11 +313,11 @@ def add_specs(self, *specs, **kwargs):

    def add_standalone(self, spec):
        if spec.external:
            tty.warn(f"{self._croot}Skipping external package: {colorize_spec(spec)}")
            tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
            return True

        if self.check_added(spec):
            tty.warn(f"{self._croot}Skipping already linked package: {colorize_spec(spec)}")
            tty.warn(self._croot + "Skipping already linked package: %s" % colorize_spec(spec))
            return True

        self.merge(spec)
@@ -327,7 +325,7 @@ def add_standalone(self, spec):
        self.link_meta_folder(spec)

        if self.verbose:
            tty.info(f"{self._croot}Linked package: {colorize_spec(spec)}")
            tty.info(self._croot + "Linked package: %s" % colorize_spec(spec))
        return True

    def merge(self, spec, ignore=None):
@@ -395,7 +393,7 @@ def needs_file(spec, file):

        for file in files:
            if not os.path.lexists(file):
                tty.warn(f"Tried to remove {file} which does not exist")
                tty.warn("Tried to remove %s which does not exist" % file)
                continue

            # remove if file is not owned by any other package in the view
@@ -406,7 +404,7 @@ def needs_file(spec, file):
            # we are currently removing, as we remove files before unlinking the
            # metadata directory.
            if len([s for s in specs if needs_file(s, file)]) <= 1:
                tty.debug(f"Removing file {file}")
                tty.debug("Removing file " + file)
                os.remove(file)

    def check_added(self, spec):
@@ -479,14 +477,14 @@ def remove_standalone(self, spec):
        Remove (unlink) a standalone package from this view.
        """
        if not self.check_added(spec):
            tty.warn(f"{self._croot}Skipping package not linked in view: {spec.name}")
            tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
            return

        self.unmerge(spec)
        self.unlink_meta_folder(spec)

        if self.verbose:
            tty.info(f"{self._croot}Removed package: {colorize_spec(spec)}")
            tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))

    def get_projection_for_spec(self, spec):
        """
@@ -560,9 +558,9 @@ def print_conflict(self, spec_active, spec_specified, level="error"):
        linked = tty.color.colorize("   (@gLinked@.)", color=color)
        specified = tty.color.colorize("(@rSpecified@.)", color=color)
        cprint(
            f"{self._croot}Package conflict detected:\n"
            f"{linked} {colorize_spec(spec_active)}\n"
            f"{specified} {colorize_spec(spec_specified)}"
            self._croot + "Package conflict detected:\n"
            "%s %s\n" % (linked, colorize_spec(spec_active))
            + "%s %s" % (specified, colorize_spec(spec_specified))
        )

    def print_status(self, *specs, **kwargs):
@@ -574,14 +572,14 @@ def print_status(self, *specs, **kwargs):

        for s, v in zip(specs, in_view):
            if not v:
                tty.error(f"{self._croot}Package not linked: {s.name}")
                tty.error(self._croot + "Package not linked: %s" % s.name)
            elif s != v:
                self.print_conflict(v, s, level="warn")

        in_view = list(filter(None, in_view))

        if len(specs) > 0:
            tty.msg(f"Packages linked in {self._croot[:-1]}:")
            tty.msg("Packages linked in %s:" % self._croot[:-1])

            # Make a dict with specs keyed by architecture and compiler.
            index = index_by(specs, ("architecture", "compiler"))
@@ -591,19 +589,20 @@ def print_status(self, *specs, **kwargs):
                if i > 0:
                    print()

                header = (
                    f"{spack.spec.ARCHITECTURE_COLOR}{{{architecture}}} "
                    f"/ {spack.spec.COMPILER_COLOR}{{{compiler}}}"
                header = "%s{%s} / %s{%s}" % (
                    spack.spec.ARCHITECTURE_COLOR,
                    architecture,
                    spack.spec.COMPILER_COLOR,
                    compiler,
                )
                tty.hline(colorize(header), char="-")

                specs = index[(architecture, compiler)]
                specs.sort()

                abbreviated = [
                    s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
                    for s in specs
                ]
                format_string = "{name}{@version}"
                format_string += "{%compiler}{compiler_flags}{variants}"
                abbreviated = [s.cformat(format_string) for s in specs]

                # Print one spec per line along with prefix path
                width = max(len(s) for s in abbreviated)
@@ -635,19 +634,22 @@ def unlink_meta_folder(self, spec):


class SimpleFilesystemView(FilesystemView):
    """A simple and partial implementation of FilesystemView focused on performance and immutable
    views, where specs cannot be removed after they were added."""
    """A simple and partial implementation of FilesystemView focused on
    performance and immutable views, where specs cannot be removed after they
    were added."""

    def __init__(self, root, layout, **kwargs):
        super().__init__(root, layout, **kwargs)

    def _sanity_check_view_projection(self, specs):
        """A very common issue is that we end up with two specs of the same package, that project
        to the same prefix. We want to catch that as early as possible and give a sensible error to
        the user. Here we use the metadata dir (.spack) projection as a quick test to see whether
        two specs in the view are going to clash. The metadata dir is used because it's always
        added by Spack with identical files, so a guaranteed clash that's easily verified."""
        seen = {}
        """A very common issue is that we end up with two specs of the same
        package, that project to the same prefix. We want to catch that as
        early as possible and give a sensible error to the user. Here we use
        the metadata dir (.spack) projection as a quick test to see whether
        two specs in the view are going to clash. The metadata dir is used
        because it's always added by Spack with identical files, so a
        guaranteed clash that's easily verified."""
        seen = dict()
        for current_spec in specs:
            metadata_dir = self.relative_metadata_dir_for_spec(current_spec)
            conflicting_spec = seen.get(metadata_dir)
@@ -655,8 +657,7 @@ def _sanity_check_view_projection(self, specs):
                raise ConflictingSpecsError(current_spec, conflicting_spec)
            seen[metadata_dir] = current_spec
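
Editor's note: the sanity check above is a first-collision scan over a dict. A standalone sketch, with `projection` standing in for `relative_metadata_dir_for_spec` and `first_clash` as a hypothetical helper:

```python
from typing import Callable, Iterable, Optional, Tuple

def first_clash(
    specs: Iterable[str], projection: Callable[[str], str]
) -> Optional[Tuple[str, str]]:
    """Return the first pair of specs projecting to the same metadata dir."""
    seen = {}
    for spec in specs:
        key = projection(spec)
        if key in seen:
            return seen[key], spec  # the conflicting pair
        seen[key] = spec
    return None

# Two versions of one package collide under a name-only projection.
clash = first_clash(["zlib@1.2", "zlib@1.3"], lambda s: s.split("@")[0])
assert clash == ("zlib@1.2", "zlib@1.3")
```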

    def add_specs(self, *specs: spack.spec.Spec) -> None:
        """Link a root-to-leaf topologically ordered list of specs into the view."""
    def add_specs(self, *specs, **kwargs):
        assert all((s.concrete for s in specs))
        if len(specs) == 0:
            return
@@ -667,6 +668,9 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
                tty.warn("Skipping external package: " + s.short_spec)
        specs = [s for s in specs if not s.external]

        if kwargs.get("exclude", None):
            specs = set(filter_exclude(specs, kwargs["exclude"]))

        self._sanity_check_view_projection(specs)

        # Ignore spack meta data folder.
@@ -691,11 +695,13 @@ def skip_list(file):
        # Inform about file-file conflicts.
        if visitor.file_conflicts:
            if self.ignore_conflicts:
                tty.debug(f"{len(visitor.file_conflicts)} file conflicts")
                tty.debug("{0} file conflicts".format(len(visitor.file_conflicts)))
            else:
                raise MergeConflictSummary(visitor.file_conflicts)

        tty.debug(f"Creating {len(visitor.directories)} dirs and {len(visitor.files)} links")
        tty.debug(
            "Creating {0} dirs and {1} links".format(len(visitor.directories), len(visitor.files))
        )

        # Make the directory structure
        for dst in visitor.directories:

@@ -15,6 +13,6 @@
    * post_install(spec, explicit)
    * pre_uninstall(spec)
    * post_uninstall(spec)
    * on_install_start(spec)
    * on_install_success(spec)
    * on_install_failure(spec)
    * on_phase_success(pkg, phase_name, log_file)
    * on_phase_error(pkg, phase_name, log_file)
    * on_phase_error(pkg, phase_name, log_file)
    * post_env_write(env)

This can be used to implement support for things like module
systems (e.g. modules, lmod, etc.) or to add other custom
@@ -71,5 +78,17 @@ def __call__(self, *args, **kwargs):
pre_install = _HookRunner("pre_install")
post_install = _HookRunner("post_install")

# These hooks are run within an install subprocess
pre_uninstall = _HookRunner("pre_uninstall")
post_uninstall = _HookRunner("post_uninstall")
on_phase_success = _HookRunner("on_phase_success")
on_phase_error = _HookRunner("on_phase_error")

# These are hooks in installer.py, before starting install subprocess
on_install_start = _HookRunner("on_install_start")
on_install_success = _HookRunner("on_install_success")
on_install_failure = _HookRunner("on_install_failure")
on_install_cancel = _HookRunner("on_install_cancel")

# Environment hooks
post_env_write = _HookRunner("post_env_write")
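
Editor's note: a minimal sketch of the fan-out pattern these `_HookRunner` attributes rely on — each attribute dispatches a call to every hook module that defines a function of the same name. `HookRunner` below is a toy, not Spack's implementation:

```python
import types

class HookRunner:
    """Call hook_name(*args, **kwargs) in every module that defines it."""
    def __init__(self, hook_name, modules):
        self.hook_name = hook_name
        self.modules = modules
    def __call__(self, *args, **kwargs):
        for module in self.modules:
            hook = getattr(module, self.hook_name, None)
            if hook:
                hook(*args, **kwargs)

calls = []
mod = types.SimpleNamespace(post_install=lambda spec: calls.append(spec))
post_install = HookRunner("post_install", [mod])
post_install("zlib@1.3")
assert calls == ["zlib@1.3"]
```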

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
from typing import BinaryIO, Optional, Tuple
from typing import IO, Optional, Tuple

import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
@@ -18,7 +18,7 @@ def should_keep(path: bytes) -> bool:
    return path.startswith(b"$") or (os.path.isabs(path) and os.path.lexists(path))


def _drop_redundant_rpaths(f: BinaryIO) -> Optional[Tuple[bytes, bytes]]:
def _drop_redundant_rpaths(f: IO) -> Optional[Tuple[bytes, bytes]]:
    """Drop redundant entries from rpath.

    Args:

@@ -34,8 +34,21 @@ def _for_each_enabled(


def post_install(spec, explicit: bool):
    import spack.environment as ev  # break import cycle

    if ev.active_environment():
        # If installed through an environment, we skip post_install
        # module generation and generate the modules on env_write so Spack
        # can manage interactions between env views and modules
        return

    _for_each_enabled(spec, "write", explicit)


def post_uninstall(spec):
    _for_each_enabled(spec, "remove")


def post_env_write(env):
    for spec in env.new_installs:
        _for_each_enabled(spec, "write")

@@ -229,8 +229,6 @@ def post_install(spec, explicit=None):
       $spack_prefix/bin/sbang instead of something longer than the
       shebang limit.
    """
    if sys.platform == "win32":
        return
    if spec.external:
        tty.debug("SKIP: shebang filtering [external package]")
        return

@@ -36,7 +36,6 @@
import sys
import time
from collections import defaultdict
from gzip import GzipFile
from typing import Dict, Iterator, List, Optional, Set, Tuple

import llnl.util.filesystem as fs
@@ -639,12 +638,13 @@ def archive_install_logs(pkg: "spack.package_base.PackageBase", phase_log_dir: s
        pkg: the package that was built and installed
        phase_log_dir: path to the archive directory
    """
    # Copy a compressed version of the install log
    with open(pkg.log_path, "rb") as f, open(pkg.install_log_path, "wb") as g:
        # Use GzipFile directly so we can omit filename / mtime in header
        gzip_file = GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=g)
        shutil.copyfileobj(f, gzip_file)
        gzip_file.close()
    # Archive the whole stdout + stderr for the package
    fs.install(pkg.log_path, pkg.install_log_path)

    # Archive all phase log paths
    for phase_log in pkg.phase_log_files:
        log_file = os.path.basename(phase_log)
        fs.install(phase_log, os.path.join(phase_log_dir, log_file))

    # Archive the install-phase test log, if present
    pkg.archive_install_test_log()
@@ -1705,6 +1705,7 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
        except spack.build_environment.StopPhase as e:
            # A StopPhase exception means that do_install was asked to
            # stop early from clients, and is not an error at this point
            spack.hooks.on_install_failure(task.request.pkg.spec)
            pid = f"{self.pid}: " if tty.show_pid() else ""
            tty.debug(f"{pid}{str(e)}")
            tty.debug(f"Package stage directory: {pkg.stage.source_path}")
@@ -2010,6 +2011,7 @@ def install(self) -> None:
            if task is None:
                continue

            spack.hooks.on_install_start(task.request.pkg.spec)
            install_args = task.request.install_args
            keep_prefix = install_args.get("keep_prefix")

@@ -2035,6 +2037,9 @@ def install(self) -> None:
                    tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left")
                dep_str = "dependencies" if task.priority > 1 else "dependency"

                # Hook to indicate task failure, but without an exception
                spack.hooks.on_install_failure(task.request.pkg.spec)

                raise InstallError(
                    f"Cannot proceed with {pkg_id}: {task.priority} uninstalled "
                    f"{dep_str}: {','.join(task.uninstalled_deps)}",
@@ -2057,6 +2062,11 @@ def install(self) -> None:
                tty.warn(f"{pkg_id} failed to install")
                self._update_failed(task)

                # Mark that the package failed
                # TODO: this should also be for the task.pkg, but we don't
                # model transitive yet.
                spack.hooks.on_install_failure(task.request.pkg.spec)

                if self.fail_fast:
                    raise InstallError(fail_fast_err, pkg=pkg)

@@ -2159,6 +2169,7 @@ def install(self) -> None:
                tty.error(
                    f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
                )
                spack.hooks.on_install_cancel(task.request.pkg.spec)
                raise

            except binary_distribution.NoChecksumException as exc:
@@ -2177,6 +2188,7 @@ def install(self) -> None:

            except (Exception, SystemExit) as exc:
                self._update_failed(task, True, exc)
                spack.hooks.on_install_failure(task.request.pkg.spec)

                # Best effort installs suppress the exception and mark the
                # package as a failure.
@@ -2360,6 +2372,9 @@ def run(self) -> bool:
        _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
        _print_installed_pkg(self.pkg.prefix)

        # Send final status that install is successful
        spack.hooks.on_install_success(self.pkg.spec)

        # preserve verbosity across runs
        return self.echo

@@ -2438,10 +2453,15 @@ def _real_install(self) -> None:
                # Catch any errors to report to logging
                self.timer.start(phase_fn.name)
                phase_fn.execute()
                spack.hooks.on_phase_success(pkg, phase_fn.name, log_file)
                self.timer.stop(phase_fn.name)

            except BaseException:
                combine_phase_logs(pkg.phase_log_files, pkg.log_path)
                spack.hooks.on_phase_error(pkg, phase_fn.name, log_file)

                # phase error indicates install error
                spack.hooks.on_install_failure(pkg.spec)
                raise

        # We assume loggers share echo True/False
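
Editor's note: the hook placement in `_real_install` follows a simple wrap pattern — the success hook fires inside the `try`, and the phase-error plus install-failure hooks fire on the `except` path before re-raising. A hedged sketch (`run_phase` is illustrative):

```python
def run_phase(phase, on_success, on_error, on_failure):
    try:
        phase()
        on_success()
    except BaseException:
        on_error()
        on_failure()  # a phase error also counts as an install failure
        raise

events = []
run_phase(
    lambda: None,
    lambda: events.append("ok"),
    lambda: events.append("err"),
    lambda: events.append("fail"),
)
assert events == ["ok"]
```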

@@ -1038,9 +1038,9 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
    set_working_dir()

    # now we can actually execute the command.
    if main_args.spack_profile or main_args.sorted_profile:
    if args.spack_profile or args.sorted_profile:
        _profile_wrapper(command, parser, args, unknown)
    elif main_args.pdb:
    elif args.pdb:
        import pdb

        pdb.runctx("_invoke_command(command, parser, args, unknown)", globals(), locals())

@@ -43,7 +43,6 @@

import spack.build_environment
import spack.config
import spack.deptypes as dt
import spack.environment
import spack.error
import spack.modules.common
@@ -54,7 +53,6 @@
import spack.spec
import spack.store
import spack.tengine as tengine
import spack.user_environment
import spack.util.environment
import spack.util.file_permissions as fp
import spack.util.path
@@ -697,33 +695,28 @@ def environment_modifications(self):
        )
        spack.config.merge_yaml(
            prefix_inspections,
            spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
            spack.config.get("modules:%s:prefix_inspections" % self.conf.name, {}),
        )

        use_view = spack.config.get(f"modules:{self.conf.name}:use_view", False)

        assert isinstance(use_view, (bool, str))
        use_view = spack.config.get("modules:%s:use_view" % self.conf.name, False)

        spec = self.spec.copy()  # defensive copy before setting prefix
        if use_view:
            if use_view is True:
                use_view = spack.environment.default_view_name

            env = spack.environment.active_environment()
            if not env:
                raise spack.environment.SpackEnvironmentViewError(
                    "Module generation with views requires active environment"
                )

            view_name = spack.environment.default_view_name if use_view is True else use_view
            view = env.views[use_view]

            if not env.has_view(view_name):
                raise spack.environment.SpackEnvironmentViewError(
                    f"View {view_name} not found in environment {env.name} when generating modules"
                )

            view = env.views[view_name]
        else:
            view = None
            spec.prefix = view.get_projection_for_spec(spec)

        env = spack.util.environment.inspect_path(
            self.spec.prefix, prefix_inspections, exclude=spack.util.environment.is_system_path
            spec.prefix, prefix_inspections, exclude=spack.util.environment.is_system_path
        )

        # Let the extendee/dependency modify their extensions/dependencies
@@ -733,19 +726,13 @@ def environment_modifications(self):
        # whole chain of setup_dependent_package has to be followed from leaf to spec.
        # So: just run it here, but don't collect env mods.
        spack.build_environment.SetupContext(
            self.spec, context=Context.RUN
            spec, context=Context.RUN
        ).set_all_package_py_globals()

        # Then run setup_dependent_run_environment before setup_run_environment.
        for dep in self.spec.dependencies(deptype=("link", "run")):
            dep.package.setup_dependent_run_environment(env, self.spec)
        self.spec.package.setup_run_environment(env)

        # Project the environment variables from prefix to view if needed
        if view and self.spec in view:
            spack.user_environment.project_env_mods(
                *self.spec.traverse(deptype=dt.LINK | dt.RUN), view=view, env=env
            )
        for dep in spec.dependencies(deptype=("link", "run")):
            dep.package.setup_dependent_run_environment(env, spec)
        spec.package.setup_run_environment(env)

        # Modifications required from modules.yaml
        env.extend(self.conf.env)
@@ -767,11 +754,11 @@ def environment_modifications(self):
            msg = "some tokens cannot be expanded in an environment variable name"
            _check_tokens_are_valid(x.name, message=msg)
            # Transform them
            x.name = self.spec.format(x.name, transform=transform)
            x.name = spec.format(x.name, transform=transform)
            if self.modification_needs_formatting(x):
                try:
                    # Not every command has a value
                    x.value = self.spec.format(x.value)
                    x.value = spec.format(x.value)
                except AttributeError:
                    pass
            x.name = str(x.name).replace("-", "_")

@@ -134,7 +134,7 @@ def upload_blob(
        return True

    # Otherwise, do another PUT request.
    spack.oci.opener.ensure_status(request, response, 202)
    spack.oci.opener.ensure_status(response, 202)
    assert "Location" in response.headers

    # Can be absolute or relative, joining handles both
@@ -143,16 +143,19 @@ def upload_blob(
    )
    f.seek(0)

    request = Request(
        url=upload_url,
        method="PUT",
        data=f,
        headers={"Content-Type": "application/octet-stream", "Content-Length": str(file_size)},
    response = _urlopen(
        Request(
            url=upload_url,
            method="PUT",
            data=f,
            headers={
                "Content-Type": "application/octet-stream",
                "Content-Length": str(file_size),
            },
        )
    )

    response = _urlopen(request)

    spack.oci.opener.ensure_status(request, response, 201)
    spack.oci.opener.ensure_status(response, 201)

    # print elapsed time and # MB/s
    _log_upload_progress(digest, file_size, time.time() - start)
@@ -186,16 +189,16 @@ def upload_manifest(
    if not tag:
        ref = ref.with_digest(digest)

    request = Request(
        url=ref.manifest_url(),
        method="PUT",
        data=data,
        headers={"Content-Type": oci_manifest["mediaType"]},
    response = _urlopen(
        Request(
            url=ref.manifest_url(),
            method="PUT",
            data=data,
            headers={"Content-Type": oci_manifest["mediaType"]},
        )
    )

    response = _urlopen(request)

    spack.oci.opener.ensure_status(request, response, 201)
    spack.oci.opener.ensure_status(response, 201)
    return digest, size


@@ -310,15 +310,19 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
        # Login failed, avoid infinite recursion where we go back and
        # forth between auth server and registry
        if hasattr(req, "login_attempted"):
            raise spack.util.web.DetailedHTTPError(
                req, code, f"Failed to login: {msg}", headers, fp
            raise urllib.error.HTTPError(
                req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp
            )

        # On 401 Unauthorized, parse the WWW-Authenticate header
        # to determine what authentication is required
        if "WWW-Authenticate" not in headers:
            raise spack.util.web.DetailedHTTPError(
                req, code, "Cannot login to registry, missing WWW-Authenticate header", headers, fp
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                "Cannot login to registry, missing WWW-Authenticate header",
                headers,
                fp,
            )

        header_value = headers["WWW-Authenticate"]
@@ -326,8 +330,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
        try:
            challenge = get_bearer_challenge(parse_www_authenticate(header_value))
        except ValueError as e:
            raise spack.util.web.DetailedHTTPError(
                req,
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}",
                headers,
@@ -336,8 +340,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):

        # If there is no bearer challenge, we can't handle it
        if not challenge:
            raise spack.util.web.DetailedHTTPError(
                req,
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, unsupported authentication scheme: {header_value}",
                headers,
@@ -352,8 +356,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
                timeout=req.timeout,
            )
        except ValueError as e:
            raise spack.util.web.DetailedHTTPError(
                req,
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, failed to obtain bearer token: {e}",
                headers,
@@ -408,13 +412,13 @@ def create_opener():
    return opener


def ensure_status(request: urllib.request.Request, response: HTTPResponse, status: int):
def ensure_status(response: HTTPResponse, status: int):
    """Raise an error if the response status is not the expected one."""
    if response.status == status:
        return

    raise spack.util.web.DetailedHTTPError(
        request, response.status, response.reason, response.info(), None
    raise urllib.error.HTTPError(
        response.geturl(), response.status, response.reason, response.info(), None
    )


@@ -9,8 +9,6 @@
import platform
import subprocess

from llnl.util import tty

from spack.error import SpackError
from spack.util import windows_registry as winreg
from spack.version import Version
@@ -85,50 +83,11 @@ def compiler_search_paths(self):
                os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
            )
        )

        # Second strategy: Find MSVC via the registry
        def try_query_registry(retry=False):
            winreg_report_error = lambda e: tty.debug(
                'Windows registry query on "SOFTWARE\\WOW6432Node\\Microsoft"'
                f"under HKEY_LOCAL_MACHINE: {str(e)}"
            )
            try:
                # Registry interactions are subject to race conditions, etc and can generally
                # be flakey, do this in a catch block to prevent reg issues from interfering
                # with compiler detection
                msft = winreg.WindowsRegistryView(
                    "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
                )
                return msft.find_subkeys(r"VisualStudio_.*", recursive=False)
            except OSError as e:
                # OSErrors propagated into caller by Spack's registry module are expected
                # and indicate a known issue with the registry query
                # i.e. user does not have permissions or the key/value
                # doesn't exist
                winreg_report_error(e)
                return []
            except winreg.InvalidRegistryOperation as e:
                # Other errors raised by the Spack's reg module indicate
                # an unexpected error type, and are handled specifically
                # as the underlying cause is difficult/impossible to determine
                # without manually exploring the registry
                # These errors can also be spurious (race conditions)
                # and may resolve on re-execution of the query
                # or are permanent (specific types of permission issues)
                # but the registry raises the same exception for all types of
                # atypical errors
                if retry:
                    winreg_report_error(e)
                return []

        vs_entries = try_query_registry()
        if not vs_entries:
            # Occasional spurious race conditions can arise when reading the MS reg
            # typically these race conditions resolve immediately and we can safely
            # retry the reg query without waiting
            # Note: Winreg does not support locking
            vs_entries = try_query_registry(retry=True)

        msft = winreg.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
        )
        vs_entries = msft.find_subkeys(r"VisualStudio_.*")
        vs_paths = []

        def clean_vs_path(path):
@@ -140,8 +99,11 @@ def clean_vs_path(path):
                val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value
                vs_paths.append(clean_vs_path(val))
            except FileNotFoundError as e:
                if hasattr(e, "winerror") and e.winerror == 2:
                    pass
                if hasattr(e, "winerror"):
                    if e.winerror == 2:
                        pass
                    else:
                        raise
                else:
                    raise


@@ -24,7 +24,6 @@
import textwrap
import time
import traceback
import typing
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union

@@ -67,7 +66,7 @@
from spack.stage import DIYStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.version import GitVersion, StandardVersion
from spack.version import GitVersion, StandardVersion, Version

FLAG_HANDLER_RETURN_TYPE = Tuple[
    Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
@@ -94,26 +93,29 @@
spack_times_log = "install_times.json"


def deprecated_version(pkg: "PackageBase", version: Union[str, StandardVersion]) -> bool:
    """Return True iff the version is deprecated.
def deprecated_version(pkg, version):
    """Return True if the version is deprecated, False otherwise.

    Arguments:
        pkg: The package whose version is to be checked.
        version: The version being checked
        pkg (PackageBase): The package whose version is to be checked.
        version (str or spack.version.StandardVersion): The version being checked
    """
    if not isinstance(version, StandardVersion):
        version = StandardVersion.from_string(version)
        version = Version(version)

    details = pkg.versions.get(version)
    return details is not None and details.get("deprecated", False)
    for k, v in pkg.versions.items():
        if version == k and v.get("deprecated", False):
            return True

    return False
|
||||
|
||||
|
||||
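Both variants of the `deprecated_version` body above are behaviorally equivalent for exact version keys; a minimal self-contained illustration (the `versions` mapping below is made up):

versions = {"1.0": {"deprecated": True}, "2.0": {}}

def deprecated_get(version):
    # dict lookup, as on one side of the hunk
    details = versions.get(version)
    return details is not None and details.get("deprecated", False)

def deprecated_loop(version):
    # explicit scan, as on the other side of the hunk
    for k, v in versions.items():
        if version == k and v.get("deprecated", False):
            return True
    return False

assert deprecated_get("1.0") and deprecated_loop("1.0")
assert not deprecated_get("2.0") and not deprecated_loop("2.0")
assert not deprecated_get("3.0") and not deprecated_loop("3.0")
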
def preferred_version(pkg: "PackageBase"):
|
||||
def preferred_version(pkg):
|
||||
"""
|
||||
Returns a sorted list of the preferred versions of the package.
|
||||
|
||||
Arguments:
|
||||
pkg: The package whose versions are to be assessed.
|
||||
pkg (PackageBase): The package whose versions are to be assessed.
|
||||
"""
|
||||
# Here we sort first on the fact that a version is marked
|
||||
# as preferred in the package, then on the fact that the
|
||||
@@ -730,13 +732,13 @@ def dependencies_by_name(cls, when: bool = False):
|
||||
@classmethod
|
||||
def possible_dependencies(
|
||||
cls,
|
||||
transitive: bool = True,
|
||||
expand_virtuals: bool = True,
|
||||
transitive=True,
|
||||
expand_virtuals=True,
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
visited: Optional[dict] = None,
|
||||
missing: Optional[dict] = None,
|
||||
virtuals: Optional[set] = None,
|
||||
) -> Dict[str, Set[str]]:
|
||||
visited=None,
|
||||
missing=None,
|
||||
virtuals=None,
|
||||
):
|
||||
"""Return dict of possible dependencies of this package.
|
||||
|
||||
Args:
|
||||
@@ -900,16 +902,22 @@ def version(self):
|
||||
|
||||
@classmethod
|
||||
@memoized
|
||||
def version_urls(cls) -> Dict[StandardVersion, str]:
|
||||
"""Dict of explicitly defined URLs for versions of this package.
|
||||
def version_urls(cls):
|
||||
"""OrderedDict of explicitly defined URLs for versions of this package.
|
||||
|
||||
Return:
|
||||
An dict mapping version to url, ordered by version.
|
||||
An OrderedDict (version -> URL) different versions of this
|
||||
package, sorted by version.
|
||||
|
||||
A version's URL only appears in the result if it has an an explicitly defined ``url``
|
||||
argument. So, this list may be empty if a package only defines ``url`` at the top level.
|
||||
A version's URL only appears in the result if it has an an
|
||||
explicitly defined ``url`` argument. So, this list may be empty
|
||||
if a package only defines ``url`` at the top level.
|
||||
"""
|
||||
return {v: args["url"] for v, args in sorted(cls.versions.items()) if "url" in args}
|
||||
version_urls = collections.OrderedDict()
|
||||
for v, args in sorted(cls.versions.items()):
|
||||
if "url" in args:
|
||||
version_urls[v] = args["url"]
|
||||
return version_urls
|
||||
|
||||
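The two `version_urls` bodies above produce the same ordered mapping: since Python 3.7, plain dicts preserve insertion order, so a comprehension over `sorted(...)` matches the explicit `OrderedDict` loop. A small illustration with made-up data:

versions = {"2.0": {"url": "https://example.com/pkg-2.0.tar.gz"}, "1.0": {}}
by_version = {v: args["url"] for v, args in sorted(versions.items()) if "url" in args}
assert list(by_version) == ["2.0"]  # only versions with an explicit url, in sorted order
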
    def nearest_url(self, version):
        """Finds the URL with the "closest" version to ``version``.
@@ -952,39 +960,36 @@ def update_external_dependencies(self, extendee_spec=None):
        """
        pass

    def all_urls_for_version(self, version: StandardVersion) -> List[str]:
    def all_urls_for_version(self, version):
        """Return all URLs derived from version_urls(), url, urls, and
        list_url (if it contains a version) in a package in that order.

        Args:
            version: the version for which a URL is sought
            version (spack.version.Version): the version for which a URL is sought
        """
        uf = None
        if type(self).url_for_version != PackageBase.url_for_version:
            uf = self.url_for_version
        return self._implement_all_urls_for_version(version, uf)

    def _implement_all_urls_for_version(
        self,
        version: Union[str, StandardVersion],
        custom_url_for_version: Optional[Callable[[StandardVersion], Optional[str]]] = None,
    ) -> List[str]:
        version = StandardVersion.from_string(version) if isinstance(version, str) else version
    def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
        if not isinstance(version, StandardVersion):
            version = Version(version)

        urls: List[str] = []
        urls = []

        # If we have a specific URL for this version, don't extrapolate.
        url = self.version_urls().get(version)
        if url:
            urls.append(url)
        version_urls = self.version_urls()
        if version in version_urls:
            urls.append(version_urls[version])

        # if there is a custom url_for_version, use it
        if custom_url_for_version is not None:
            u = custom_url_for_version(version)
            if u is not None and u not in urls:
            if u not in urls and u is not None:
                urls.append(u)

        def sub_and_add(u: Optional[str]) -> None:
        def sub_and_add(u):
            if u is None:
                return
            # skip the url if there is no version to replace
@@ -992,7 +997,9 @@ def sub_and_add(u: Optional[str]) -> None:
                spack.url.parse_version(u)
            except spack.url.UndetectableVersionError:
                return
            urls.append(spack.url.substitute_version(u, self.url_version(version)))
            nu = spack.url.substitute_version(u, self.url_version(version))

            urls.append(nu)

        # If no specific URL, use the default, class-level URL
        sub_and_add(getattr(self, "url", None))
@@ -1123,7 +1130,13 @@ def stage(self, stage):
    @property
    def env_path(self):
        """Return the build environment file path associated with staging."""
        return os.path.join(self.stage.path, _spack_build_envfile)
        # Backward compatibility: Return the name of an existing log path;
        # otherwise, return the current install env path name.
        old_filename = os.path.join(self.stage.path, "spack-build.env")
        if os.path.exists(old_filename):
            return old_filename
        else:
            return os.path.join(self.stage.path, _spack_build_envfile)

    @property
    def env_mods_path(self):
@@ -1154,6 +1167,13 @@ def install_env_path(self):
    @property
    def log_path(self):
        """Return the build log file path associated with staging."""
        # Backward compatibility: Return the name of an existing log path.
        for filename in ["spack-build.out", "spack-build.txt"]:
            old_log = os.path.join(self.stage.path, filename)
            if os.path.exists(old_log):
                return old_log

        # Otherwise, return the current log path name.
        return os.path.join(self.stage.path, _spack_build_logfile)

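The `env_path` and `log_path` hunks above share one backward-compatibility pattern: prefer a legacy file name if it already exists on disk, otherwise fall back to the current name. A generic sketch of that pattern (the helper and its names are illustrative, not Spack API):

import os

def first_existing(directory, legacy_names, current_name):
    # Return the first legacy file that exists, else the current path name.
    for name in legacy_names:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return os.path.join(directory, current_name)
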
    @property
@@ -1166,15 +1186,15 @@ def phase_log_files(self):

    @property
    def install_log_path(self):
        """Return the (compressed) build log file path on successful installation"""
        """Return the build log file path on successful installation."""
        # Backward compatibility: Return the name of an existing install log.
        for filename in [_spack_build_logfile, "build.out", "build.txt"]:
        for filename in ["build.out", "build.txt"]:
            old_log = os.path.join(self.metadata_dir, filename)
            if os.path.exists(old_log):
                return old_log

        # Otherwise, return the current install log path name.
        return os.path.join(self.metadata_dir, _spack_build_logfile + ".gz")
        return os.path.join(self.metadata_dir, _spack_build_logfile)

    @property
    def configure_args_path(self):
@@ -1392,9 +1412,13 @@ def download_instr(self):
            (str): default manual download instructions
        """
        required = (
            f"Manual download is required for {self.spec.name}. " if self.manual_download else ""
            "Manual download is required for {0}. ".format(self.spec.name)
            if self.manual_download
            else ""
        )
        return "{0}Refer to {1} for download instructions.".format(
            required, self.spec.package.homepage
        )
        return f"{required}Refer to {self.homepage} for download instructions."

    def do_fetch(self, mirror_only=False):
        """
@@ -2067,6 +2091,15 @@ def unit_test_check(self):
        """
        return True

    @property
    def build_log_path(self):
        """
        Return the expected (or current) build log file path. The path points
        to the staging build file until the software is successfully installed,
        when it points to the file in the installation directory.
        """
        return self.install_log_path if self.spec.installed else self.log_path

    @classmethod
    def inject_flags(cls: Type[Pb], name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
        """
@@ -2350,14 +2383,15 @@ def format_doc(cls, **kwargs):
        return results.getvalue()

    @property
    def all_urls(self) -> List[str]:
    def all_urls(self):
        """A list of all URLs in a package.

        Check both class-level and version-specific URLs.

        Returns a list of URLs
        Returns:
            list: a list of URLs
        """
        urls: List[str] = []
        urls = []
        if hasattr(self, "url") and self.url:
            urls.append(self.url)

@@ -2370,9 +2404,7 @@ def all_urls(self) -> List[str]:
            urls.append(args["url"])
        return urls

    def fetch_remote_versions(
        self, concurrency: Optional[int] = None
    ) -> Dict[StandardVersion, str]:
    def fetch_remote_versions(self, concurrency=None):
        """Find remote versions of this package.

        Uses ``list_url`` and any other URLs listed in the package file.
@@ -2461,21 +2493,14 @@ def flatten_dependencies(spec, flat_dir):
    dep_files.merge(flat_dir + "/" + name)


def possible_dependencies(
    *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
    transitive: bool = True,
    expand_virtuals: bool = True,
    depflag: dt.DepFlag = dt.ALL,
    missing: Optional[dict] = None,
    virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
def possible_dependencies(*pkg_or_spec, **kwargs):
    """Get the possible dependencies of a number of packages.

    See ``PackageBase.possible_dependencies`` for details.
    """
    packages = []
    for pos in pkg_or_spec:
        if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
        if isinstance(pos, PackageMeta):
            packages.append(pos)
            continue

@@ -2488,16 +2513,9 @@ def possible_dependencies(
        else:
            packages.append(pos.package_class)

    visited: Dict[str, Set[str]] = {}
    visited = {}
    for pkg in packages:
        pkg.possible_dependencies(
            visited=visited,
            transitive=transitive,
            expand_virtuals=expand_virtuals,
            depflag=depflag,
            missing=missing,
            virtuals=virtuals,
        )
        pkg.possible_dependencies(visited=visited, **kwargs)

    return visited

|
||||
|
||||
def __init__(self, conflict):
|
||||
super().__init__("%s conflicts with another file in the flattened directory." % (conflict))
|
||||
|
||||
|
||||
class ManualDownloadRequiredError(InvalidPackageOpError):
|
||||
"""Raised when attempting an invalid operation on a package that requires a manual download."""
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
import os
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
from typing import List, Optional
|
||||
|
||||
import macholib.mach_o
|
||||
import macholib.MachO
|
||||
@@ -48,7 +47,7 @@ def __init__(self, file_path, root_path):
|
||||
|
||||
|
||||
@memoized
|
||||
def _patchelf() -> Optional[executable.Executable]:
|
||||
def _patchelf():
|
||||
"""Return the full path to the patchelf binary, if available, else None."""
|
||||
import spack.bootstrap
|
||||
|
||||
@@ -56,7 +55,9 @@ def _patchelf() -> Optional[executable.Executable]:
|
||||
return None
|
||||
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
return spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
||||
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
||||
|
||||
return patchelf.path
|
||||
|
||||
|
||||
def _elf_rpaths_for(path):
|
||||
@@ -339,34 +340,31 @@ def macholib_get_paths(cur_path):
|
||||
return (rpaths, deps, ident)
|
||||
|
||||
|
||||
def _set_elf_rpaths_and_interpreter(
|
||||
target: str, rpaths: List[str], interpreter: Optional[str] = None
|
||||
) -> Optional[str]:
|
||||
"""Replace the original RPATH of the target with the paths passed as arguments.
|
||||
def _set_elf_rpaths(target, rpaths):
|
||||
"""Replace the original RPATH of the target with the paths passed
|
||||
as arguments.
|
||||
|
||||
Args:
|
||||
target: target executable. Must be an ELF object.
|
||||
rpaths: paths to be set in the RPATH
|
||||
interpreter: optionally set the interpreter
|
||||
|
||||
Returns:
|
||||
A string concatenating the stdout and stderr of the call to ``patchelf`` if it was invoked
|
||||
A string concatenating the stdout and stderr of the call
|
||||
to ``patchelf`` if it was invoked
|
||||
"""
|
||||
# Join the paths using ':' as a separator
|
||||
rpaths_str = ":".join(rpaths)
|
||||
|
||||
patchelf, output = executable.Executable(_patchelf()), None
|
||||
try:
|
||||
# TODO: error handling is not great here?
|
||||
# TODO: revisit the use of --force-rpath as it might be conditional
|
||||
# TODO: if we want to support setting RUNPATH from binary packages
|
||||
args = ["--force-rpath", "--set-rpath", rpaths_str]
|
||||
if interpreter:
|
||||
args.extend(["--set-interpreter", interpreter])
|
||||
args.append(target)
|
||||
return _patchelf()(*args, output=str, error=str)
|
||||
patchelf_args = ["--force-rpath", "--set-rpath", rpaths_str, target]
|
||||
output = patchelf(*patchelf_args, output=str, error=str)
|
||||
except executable.ProcessError as e:
|
||||
tty.warn(str(e))
|
||||
return None
|
||||
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
|
||||
tty.warn(msg.format(target, e))
|
||||
return output
|
||||
|
||||
|
||||
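At the command line, the patchelf invocation in the hunk above amounts to roughly the following; this is a sketch using subprocess instead of Spack's Executable wrapper, and only the flags shown in the hunk itself are assumed to exist:

import subprocess

def set_rpath(binary, rpaths, interpreter=None):
    # Roughly: patchelf --force-rpath --set-rpath A:B[:...] [--set-interpreter LD] BINARY
    cmd = ["patchelf", "--force-rpath", "--set-rpath", ":".join(rpaths)]
    if interpreter:
        cmd += ["--set-interpreter", interpreter]
    cmd.append(binary)
    return subprocess.run(cmd, capture_output=True, text=True, check=False)
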
def needs_binary_relocation(m_type, m_subtype):
@@ -503,12 +501,10 @@ def new_relocate_elf_binaries(binaries, prefix_to_prefix):

    for path in binaries:
        try:
            elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix)
        except elf.ElfCStringUpdatesFailed as e:
            # Fall back to `patchelf --set-rpath ... --set-interpreter ...`
            rpaths = e.rpath.new_value.decode("utf-8").split(":") if e.rpath else []
            interpreter = e.pt_interp.new_value.decode("utf-8") if e.pt_interp else None
            _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
            elf.replace_rpath_in_place_or_raise(path, prefix_to_prefix)
        except elf.ElfDynamicSectionUpdateFailed as e:
            # Fall back to the old `patchelf --set-rpath` method.
            _set_elf_rpaths(path, e.new.decode("utf-8").split(":"))


def relocate_elf_binaries(
@@ -550,10 +546,10 @@ def relocate_elf_binaries(
        new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
        # check to see if relative rpaths are changed before rewriting
        if sorted(new_rpaths) != sorted(orig_rpaths):
            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
            _set_elf_rpaths(new_binary, new_rpaths)
    else:
        new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
        _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
        _set_elf_rpaths(new_binary, new_rpaths)


def make_link_relative(new_links, orig_links):
@@ -600,7 +596,7 @@ def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
        orig_rpaths = _elf_rpaths_for(new_binary)
        if orig_rpaths:
            new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
            _set_elf_rpaths(new_binary, new_rpaths)


def warn_if_link_cant_be_relocated(link, target):

@@ -490,7 +490,7 @@ def read(self, stream):
        self.index = spack.tag.TagIndex.from_json(stream, self.repository)

    def update(self, pkg_fullname):
        self.index.update_package(pkg_fullname.split(".")[-1])
        self.index.update_package(pkg_fullname)

    def write(self, stream):
        self.index.to_json(stream)

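The tag-index change above indexes packages by their unqualified name; stripping the namespace is a one-liner, e.g.:

# A fully qualified name reduces to its last dotted component;
# names without a namespace pass through unchanged.
assert "builtin.zlib".split(".")[-1] == "zlib"
assert "zlib".split(".")[-1] == "zlib"
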
@@ -6,7 +6,6 @@
import collections
import contextlib
import functools
import gzip
import os
import time
import traceback
@@ -191,13 +190,9 @@ def on_success(self, pkg, kwargs, package_record):

    def fetch_log(self, pkg):
        try:
            if os.path.exists(pkg.install_log_path):
                stream = gzip.open(pkg.install_log_path, "rt")
            else:
                stream = open(pkg.log_path)
            with stream as f:
                return f.read()
        except OSError:
            with open(pkg.build_log_path, "r", encoding="utf-8") as stream:
                return "".join(stream.readlines())
        except Exception:
            return f"Cannot open log for {pkg.spec.cshort_spec}"

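The `fetch_log` hunk above transparently prefers the compressed install log and falls back to the plain staging log; a condensed sketch of the same pattern (paths are hypothetical):

import gzip
import os

def read_build_log(gz_path, plain_path):
    # Open the gzipped install log in text mode if it exists,
    # otherwise fall back to the uncompressed staging log.
    if os.path.exists(gz_path):
        stream = gzip.open(gz_path, "rt")
    else:
        stream = open(plain_path, "r", encoding="utf-8")
    with stream as f:
        return f.read()
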
    def extract_package_from_signature(self, instance, *args, **kwargs):

@@ -3,17 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for bootstrap.yaml configuration file."""
from typing import Any, Dict

#: Schema of a single source
_source_schema: Dict[str, Any] = {
_source_schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}, "metadata": {"type": "string"}},
    "additionalProperties": False,
    "required": ["name", "metadata"],
}

properties: Dict[str, Any] = {
properties = {
    "bootstrap": {
        "type": "object",
        "properties": {

@@ -6,31 +6,27 @@
"""Schema for a buildcache spec.yaml file

.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
   :lines: 15-
   :lines: 13-
"""
from typing import Any, Dict

import spack.schema.spec

properties: Dict[str, Any] = {
    # `buildinfo` is no longer needed as of Spack 0.21
    "buildinfo": {"type": "object"},
    "spec": {
        "type": "object",
        "additionalProperties": True,
        "items": spack.schema.spec.properties,
    },
    "binary_cache_checksum": {
        "type": "object",
        "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
    },
    "buildcache_layout_version": {"type": "number"},
}

schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack buildcache specfile schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
    "properties": {
        # `buildinfo` is no longer needed as of Spack 0.21
        "buildinfo": {"type": "object"},
        "spec": {
            "type": "object",
            "additionalProperties": True,
            "items": spack.schema.spec.properties,
        },
        "binary_cache_checksum": {
            "type": "object",
            "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
        },
        "buildcache_layout_version": {"type": "number"},
    },
}

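The schema hunks in this comparison all turn on one difference: whether the "properties" mapping is a typed module-level constant referenced from the full schema, or inlined at the point of use. Reduced to its essentials (the names below are illustrative, not from any Spack schema):

from typing import Any, Dict

properties: Dict[str, Any] = {"example": {"type": "string"}}

schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Illustrative schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,  # shared constant instead of an inline copy
}
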
@@ -2,15 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for cdash.yaml configuration file.

.. literalinclude:: ../spack/schema/cdash.py
   :lines: 13-
"""
from typing import Any, Dict


#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "cdash": {
        "type": "object",
        "additionalProperties": False,

@@ -2,12 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for gitlab-ci.yaml configuration file.

.. literalinclude:: ../spack/schema/ci.py
   :lines: 16-
   :lines: 13-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

@@ -164,7 +164,7 @@
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "ci": {
        "oneOf": [
            # TODO: Replace with core-shared-properties in Spack 0.23

@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for compilers.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/compilers.py
   :lines: 15-
   :lines: 13-
"""
from typing import Any, Dict

import spack.schema.environment

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "compilers": {
        "type": "array",
        "items": {

@@ -2,14 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for concretizer.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/concretizer.py
   :lines: 12-
   :lines: 13-
"""
from typing import Any, Dict

properties: Dict[str, Any] = {
properties = {
    "concretizer": {
        "type": "object",
        "additionalProperties": False,

@@ -5,16 +5,15 @@
"""Schema for config.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/config.py
   :lines: 17-
   :lines: 13-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

import spack.schema.projections

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "config": {
        "type": "object",
        "default": {},

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for the 'container' subsection of Spack environments."""
from typing import Any, Dict

_stages_from_dockerhub = {
    "type": "object",
@@ -86,4 +85,4 @@
    },
}

properties: Dict[str, Any] = {"container": container_schema}
properties = {"container": container_schema}

@@ -11,115 +11,112 @@
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
"""
from typing import Any, Dict

properties: Dict[str, Any] = {
    "_meta": {
        "type": "object",
        "additionalProperties": False,
        "properties": {
            "file-type": {"type": "string", "minLength": 1},
            "cpe-version": {"type": "string", "minLength": 1},
            "system-type": {"type": "string", "minLength": 1},
            "schema-version": {"type": "string", "minLength": 1},
            # Older schemas did not have "cpe-version", just the
            # schema version; in that case it was just called "version"
            "version": {"type": "string", "minLength": 1},
        },
    },
    "compilers": {
        "type": "array",
        "items": {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "name": {"type": "string", "minLength": 1},
                "version": {"type": "string", "minLength": 1},
                "prefix": {"type": "string", "minLength": 1},
                "executables": {
                    "type": "object",
                    "additionalProperties": False,
                    "properties": {
                        "cc": {"type": "string", "minLength": 1},
                        "cxx": {"type": "string", "minLength": 1},
                        "fc": {"type": "string", "minLength": 1},
                    },
                },
                "arch": {
                    "type": "object",
                    "required": ["os", "target"],
                    "additionalProperties": False,
                    "properties": {
                        "os": {"type": "string", "minLength": 1},
                        "target": {"type": "string", "minLength": 1},
                    },
                },
            },
        },
    },
    "specs": {
        "type": "array",
        "items": {
            "type": "object",
            "required": ["name", "version", "arch", "compiler", "prefix", "hash"],
            "additionalProperties": False,
            "properties": {
                "name": {"type": "string", "minLength": 1},
                "version": {"type": "string", "minLength": 1},
                "arch": {
                    "type": "object",
                    "required": ["platform", "platform_os", "target"],
                    "additionalProperties": False,
                    "properties": {
                        "platform": {"type": "string", "minLength": 1},
                        "platform_os": {"type": "string", "minLength": 1},
                        "target": {
                            "type": "object",
                            "additionalProperties": False,
                            "required": ["name"],
                            "properties": {"name": {"type": "string", "minLength": 1}},
                        },
                    },
                },
                "compiler": {
                    "type": "object",
                    "required": ["name", "version"],
                    "additionalProperties": False,
                    "properties": {
                        "name": {"type": "string", "minLength": 1},
                        "version": {"type": "string", "minLength": 1},
                    },
                },
                "dependencies": {
                    "type": "object",
                    "patternProperties": {
                        "\\w[\\w-]*": {
                            "type": "object",
                            "required": ["hash"],
                            "additionalProperties": False,
                            "properties": {
                                "hash": {"type": "string", "minLength": 1},
                                "type": {
                                    "type": "array",
                                    "items": {"type": "string", "minLength": 1},
                                },
                            },
                        }
                    },
                },
                "prefix": {"type": "string", "minLength": 1},
                "rpm": {"type": "string", "minLength": 1},
                "hash": {"type": "string", "minLength": 1},
                "parameters": {"type": "object"},
            },
        },
    },
}

schema = {
    "$schema": "http://json-schema.org/schema#",
    "title": "CPE manifest schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
    "properties": {
        "_meta": {
            "type": "object",
            "additionalProperties": False,
            "properties": {
                "file-type": {"type": "string", "minLength": 1},
                "cpe-version": {"type": "string", "minLength": 1},
                "system-type": {"type": "string", "minLength": 1},
                "schema-version": {"type": "string", "minLength": 1},
                # Older schemas did not have "cpe-version", just the
                # schema version; in that case it was just called "version"
                "version": {"type": "string", "minLength": 1},
            },
        },
        "compilers": {
            "type": "array",
            "items": {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "prefix": {"type": "string", "minLength": 1},
                    "executables": {
                        "type": "object",
                        "additionalProperties": False,
                        "properties": {
                            "cc": {"type": "string", "minLength": 1},
                            "cxx": {"type": "string", "minLength": 1},
                            "fc": {"type": "string", "minLength": 1},
                        },
                    },
                    "arch": {
                        "type": "object",
                        "required": ["os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "os": {"type": "string", "minLength": 1},
                            "target": {"type": "string", "minLength": 1},
                        },
                    },
                },
            },
        },
        "specs": {
            "type": "array",
            "items": {
                "type": "object",
                "required": ["name", "version", "arch", "compiler", "prefix", "hash"],
                "additionalProperties": False,
                "properties": {
                    "name": {"type": "string", "minLength": 1},
                    "version": {"type": "string", "minLength": 1},
                    "arch": {
                        "type": "object",
                        "required": ["platform", "platform_os", "target"],
                        "additionalProperties": False,
                        "properties": {
                            "platform": {"type": "string", "minLength": 1},
                            "platform_os": {"type": "string", "minLength": 1},
                            "target": {
                                "type": "object",
                                "additionalProperties": False,
                                "required": ["name"],
                                "properties": {"name": {"type": "string", "minLength": 1}},
                            },
                        },
                    },
                    "compiler": {
                        "type": "object",
                        "required": ["name", "version"],
                        "additionalProperties": False,
                        "properties": {
                            "name": {"type": "string", "minLength": 1},
                            "version": {"type": "string", "minLength": 1},
                        },
                    },
                    "dependencies": {
                        "type": "object",
                        "patternProperties": {
                            "\\w[\\w-]*": {
                                "type": "object",
                                "required": ["hash"],
                                "additionalProperties": False,
                                "properties": {
                                    "hash": {"type": "string", "minLength": 1},
                                    "type": {
                                        "type": "array",
                                        "items": {"type": "string", "minLength": 1},
                                    },
                                },
                            }
                        },
                    },
                    "prefix": {"type": "string", "minLength": 1},
                    "rpm": {"type": "string", "minLength": 1},
                    "hash": {"type": "string", "minLength": 1},
                    "parameters": {"type": "object"},
                },
            },
        },
    },
}

@@ -6,41 +6,12 @@
"""Schema for database index.json file

.. literalinclude:: _spack_root/lib/spack/spack/schema/database_index.py
   :lines: 17-
   :lines: 36-
"""
from typing import Any, Dict

import spack.schema.spec

# spack.schema.spec.properties

properties: Dict[str, Any] = {
    "database": {
        "type": "object",
        "required": ["installs", "version"],
        "additionalProperties": False,
        "properties": {
            "installs": {
                "type": "object",
                "patternProperties": {
                    r"^[\w\d]{32}$": {
                        "type": "object",
                        "properties": {
                            "spec": spack.schema.spec.properties,
                            "path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
                            "installed": {"type": "boolean"},
                            "ref_count": {"type": "integer", "minimum": 0},
                            "explicit": {"type": "boolean"},
                            "installation_time": {"type": "number"},
                        },
                    }
                },
            },
            "version": {"type": "string"},
        },
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
@@ -48,5 +19,30 @@
    "type": "object",
    "required": ["database"],
    "additionalProperties": False,
    "properties": properties,
    "properties": {
        "database": {
            "type": "object",
            "required": ["installs", "version"],
            "additionalProperties": False,
            "properties": {
                "installs": {
                    "type": "object",
                    "patternProperties": {
                        r"^[\w\d]{32}$": {
                            "type": "object",
                            "properties": {
                                "spec": spack.schema.spec.properties,
                                "path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
                                "installed": {"type": "boolean"},
                                "ref_count": {"type": "integer", "minimum": 0},
                                "explicit": {"type": "boolean"},
                                "installation_time": {"type": "number"},
                            },
                        }
                    },
                },
                "version": {"type": "string"},
            },
        }
    },
}

@@ -6,14 +6,13 @@
"""Schema for definitions

.. literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py
   :lines: 16-
   :lines: 13-
"""
from typing import Any, Dict

import spack.schema

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "definitions": {
        "type": "array",
        "default": [],

@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict

properties: Dict[str, Any] = {

properties = {
    "develop": {
        "type": "object",
        "default": {},

@@ -6,10 +6,8 @@
"""Schema for env.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/env.py
   :lines: 19-
   :lines: 36-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

import spack.schema.gitlab_ci  # DEPRECATED
@@ -21,31 +19,61 @@

projections_scheme = spack.schema.projections.properties["projections"]

properties: Dict[str, Any] = {
    "spack": {
        "type": "object",
        "default": {},
        "additionalProperties": False,
        "properties": union_dicts(
            # Include deprecated "gitlab-ci" section
            spack.schema.gitlab_ci.properties,
            # merged configuration scope schemas
            spack.schema.merged.properties,
            # extra environment schema properties
            {
                "include": {"type": "array", "default": [], "items": {"type": "string"}},
                "specs": spack.schema.spec_list_schema,
            },
        ),
    }
}

schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack environment file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
    "properties": {
        "spack": {
            "type": "object",
            "default": {},
            "additionalProperties": False,
            "properties": union_dicts(
                # Include deprecated "gitlab-ci" section
                spack.schema.gitlab_ci.properties,
                # merged configuration scope schemas
                spack.schema.merged.properties,
                # extra environment schema properties
                {
                    "include": {"type": "array", "default": [], "items": {"type": "string"}},
                    "specs": spack.schema.spec_list_schema,
                    "view": {
                        "anyOf": [
                            {"type": "boolean"},
                            {"type": "string"},
                            {
                                "type": "object",
                                "patternProperties": {
                                    r"\w+": {
                                        "required": ["root"],
                                        "additionalProperties": False,
                                        "properties": {
                                            "root": {"type": "string"},
                                            "link": {
                                                "type": "string",
                                                "pattern": "(roots|all|run)",
                                            },
                                            "link_type": {"type": "string"},
                                            "select": {
                                                "type": "array",
                                                "items": {"type": "string"},
                                            },
                                            "exclude": {
                                                "type": "array",
                                                "items": {"type": "string"},
                                            },
                                            "projections": projections_scheme,
                                        },
                                    }
                                },
                            },
                        ]
                    },
                },
            ),
        }
    },
}

@@ -6,7 +6,6 @@
schemas.
"""
import collections.abc
from typing import Any, Dict

array_of_strings_or_num = {
    "type": "array",
@@ -19,7 +18,7 @@
    "patternProperties": {r"\w[\w-]*": {"anyOf": [{"type": "string"}, {"type": "number"}]}},
}

definition: Dict[str, Any] = {
definition = {
    "type": "object",
    "default": {},
    "additionalProperties": False,

@@ -6,9 +6,8 @@
"""Schema for gitlab-ci.yaml configuration file.

.. literalinclude:: ../spack/schema/gitlab_ci.py
   :lines: 15-
   :lines: 13-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

@@ -36,7 +35,7 @@

runner_selector_schema = {
    "type": "object",
    "additionalProperties": True,
    "additionalProperties": False,
    "required": ["tags"],
    "properties": runner_attributes_schema_items,
}
@@ -113,7 +112,7 @@
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"gitlab-ci": gitlab_ci_properties}
properties = {"gitlab-ci": gitlab_ci_properties}

#: Full schema with metadata
schema = {

@@ -6,10 +6,8 @@
"""Schema for configuration merged into one file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/merged.py
   :lines: 32-
   :lines: 39-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

import spack.schema.bootstrap
@@ -26,10 +24,9 @@
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = union_dicts(
properties = union_dicts(
    spack.schema.bootstrap.properties,
    spack.schema.cdash.properties,
    spack.schema.compilers.properties,
@@ -44,7 +41,6 @@
    spack.schema.packages.properties,
    spack.schema.repos.properties,
    spack.schema.upstreams.properties,
    spack.schema.view.properties,
)


@@ -6,9 +6,8 @@
"""Schema for mirrors.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/mirrors.py
   :lines: 13-
   :lines: 12-69
"""
from typing import Any, Dict

#: Common properties for connection specification
connection = {
@@ -51,7 +50,7 @@
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "mirrors": {
        "type": "object",
        "default": {},

@@ -6,10 +6,8 @@
"""Schema for modules.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/modules.py
   :lines: 16-
   :lines: 13-
"""
from typing import Any, Dict

import spack.schema.environment
import spack.schema.projections

@@ -143,7 +141,7 @@


# Properties for inclusion into other schemas (requires definitions)
properties: Dict[str, Any] = {
properties = {
    "modules": {
        "type": "object",
        "additionalProperties": False,

@@ -5,10 +5,8 @@
"""Schema for packages.yaml configuration files.

.. literalinclude:: _spack_root/lib/spack/spack/schema/packages.py
   :lines: 14-
   :lines: 13-
"""
from typing import Any, Dict

import spack.schema.environment

permissions = {
@@ -56,24 +54,6 @@
    ]
}

prefer_and_conflict = {
    "type": "array",
    "items": {
        "oneOf": [
            {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "spec": {"type": "string"},
                    "message": {"type": "string"},
                    "when": {"type": "string"},
                },
            },
            {"type": "string"},
        ]
    },
}

permissions = {
    "type": "object",
    "additionalProperties": False,
@@ -93,7 +73,7 @@
REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements"

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "packages": {
        "type": "object",
        "default": {},
@@ -105,8 +85,6 @@
        "additionalProperties": False,
        "properties": {
            "require": requirements,
            "prefer": prefer_and_conflict,
            "conflict": prefer_and_conflict,
            "version": {},  # Here only to warn users on ignored properties
            "target": {
                "type": "array",
@@ -155,8 +133,6 @@
        "additionalProperties": False,
        "properties": {
            "require": requirements,
            "prefer": prefer_and_conflict,
            "conflict": prefer_and_conflict,
            "version": {
                "type": "array",
                "default": [],
@@ -210,6 +186,7 @@
    }
}


#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",

@@ -6,12 +6,12 @@
"""Schema for projections.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/projections.py
   :lines: 14-
   :lines: 13-
"""
from typing import Any, Dict


#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "projections": {"type": "object", "patternProperties": {r"all|\w[\w-]*": {"type": "string"}}}
}


@@ -6,14 +6,12 @@
"""Schema for repos.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/repos.py
   :lines: 14-
   :lines: 13-
"""
from typing import Any, Dict


#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
    "repos": {"type": "array", "default": [], "items": {"type": "string"}}
}
properties = {"repos": {"type": "array", "default": [], "items": {"type": "string"}}}


#: Full schema with metadata

@@ -1,46 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for spack environment

.. literalinclude:: _spack_root/lib/spack/spack/schema/spack.py
   :lines: 20-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

import spack.schema
import spack.schema.gitlab_ci as ci_schema  # DEPRECATED
import spack.schema.merged as merged_schema

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
    "spack": {
        "type": "object",
        "default": {},
        "additionalProperties": False,
        "properties": union_dicts(
            # Include deprecated "gitlab-ci" section
            ci_schema.properties,
            # merged configuration scope schemas
            merged_schema.properties,
            # extra environment schema properties
            {
                "include": {"type": "array", "default": [], "items": {"type": "string"}},
                "specs": spack.schema.spec_list_schema,
            },
        ),
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack environment file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}
@@ -8,9 +8,9 @@
TODO: This needs to be updated? Especially the hashes under properties.

.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
   :lines: 15-
   :lines: 13-
"""
from typing import Any, Dict


target = {
    "oneOf": [
@@ -57,7 +57,7 @@
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "spec": {
        "type": "object",
        "additionalProperties": False,

@@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict


#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
    "upstreams": {
        "type": "object",
        "default": {},

@@ -1,49 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for view

.. literalinclude:: _spack_root/lib/spack/spack/schema/view.py
   :lines: 15-
"""
from typing import Any, Dict

import spack.schema

projections_scheme = spack.schema.projections.properties["projections"]

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
    "view": {
        "anyOf": [
            {"type": "boolean"},
            {"type": "string"},
            {
                "type": "object",
                "patternProperties": {
                    r"\w+": {
                        "required": ["root"],
                        "additionalProperties": False,
                        "properties": {
                            "root": {"type": "string"},
                            "link": {"type": "string", "pattern": "(roots|all|run)"},
                            "link_type": {"type": "string"},
                            "select": {"type": "array", "items": {"type": "string"}},
                            "exclude": {"type": "array", "items": {"type": "string"}},
                            "projections": projections_scheme,
                        },
                    }
                },
            },
        ]
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack view configuration file schema",
    "properties": properties,
}

File diff suppressed because it is too large
@@ -45,9 +45,6 @@
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("link dependency out of the root unification set").
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("run dependency out of the root unification set").

% Namespaces are statically assigned by a package fact
attr("namespace", node(ID, Package), Namespace) :- attr("node", node(ID, Package)), pkg_fact(Package, namespace(Namespace)).

% Rules on "unification sets", i.e. on sets of nodes allowing a single configuration of any given package
unify(SetID, PackageName) :- unification_set(SetID, node(_, PackageName)).
:- 2 { unification_set(SetID, node(_, PackageName)) }, unify(SetID, PackageName).
@@ -698,26 +695,6 @@ requirement_group_satisfied(node(ID, Package), X) :-
  activate_requirement(node(ID, Package), X),
  requirement_group(Package, X).

% Do not impose requirements if the conditional requirement is not active
do_not_impose(EffectID, node(ID, Package)) :-
  trigger_condition_holds(TriggerID, node(ID, Package)),
  pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
  pkg_fact(Package, condition_effect(ConditionID, EffectID)),
  requirement_group_member(ConditionID, Package, RequirementID),
  not activate_requirement(node(ID, Package), RequirementID).

% When we have a required provider, we need to ensure that the provider/2 facts respect
% the requirement. This is particularly important for packages that could provide multiple
% virtuals independently
required_provider(Provider, Virtual)
  :- requirement_group_member(ConditionID, Virtual, RequirementID),
     condition_holds(ConditionID, _),
     virtual(Virtual),
     pkg_fact(Virtual, condition_effect(ConditionID, EffectID)),
     imposed_constraint(EffectID, "node", Provider).

:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.

% TODO: the following two choice rules allow the solver to add compiler
% flags if their only source is from a requirement. This is overly-specific
% and should use a more-generic approach like in https://github.com/spack/spack/pull/37180

Some files were not shown because too many files have changed in this diff