Compare commits
29 Commits
bugfix-spa
...
features/t
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6737591016 | ||
|
|
66bb19084c | ||
|
|
9cdb25497d | ||
|
|
26c5f5265d | ||
|
|
a16d10edc9 | ||
|
|
0d021717ec | ||
|
|
231d537a2e | ||
|
|
452a84d9eb | ||
|
|
b03d5b7885 | ||
|
|
e0aa378f9e | ||
|
|
2c2282dcb4 | ||
|
|
f27eb55f29 | ||
|
|
019957a225 | ||
|
|
7472a1db8a | ||
|
|
fb6ffc45d7 | ||
|
|
ebf4c8b445 | ||
|
|
79610ce80e | ||
|
|
462379b92f | ||
|
|
893d56ac2c | ||
|
|
b4f938adf4 | ||
|
|
6623209ba7 | ||
|
|
6eaaaa4ae7 | ||
|
|
89e0cf886d | ||
|
|
e24bd2ef3c | ||
|
|
2c43131aca | ||
|
|
d792121cde | ||
|
|
ce3fcf011f | ||
|
|
e66ae3959d | ||
|
|
02513eae7e |
5
.github/workflows/sync-packages.yaml
vendored
5
.github/workflows/sync-packages.yaml
vendored
@@ -27,10 +27,7 @@ jobs:
|
||||
- name: Sync spack/spack-packages with spack/spack
|
||||
run: |
|
||||
cd spack-packages
|
||||
git-filter-repo --quiet --source ../spack \
|
||||
--subdirectory-filter var/spack/repos \
|
||||
--path share/spack/gitlab/cloud_pipelines/ --path-rename share/spack/gitlab/cloud_pipelines/:.ci/gitlab/ \
|
||||
--refs develop
|
||||
git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop
|
||||
- name: Push
|
||||
run: |
|
||||
cd spack-packages
|
||||
|
||||
@@ -66,7 +66,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
build_systems/rocmpackage
|
||||
build_systems/sourceforgepackage
|
||||
|
||||
For reference, the :py:mod:`Build System API docs <spack_repo.builtin.build_systems>`
|
||||
For reference, the :py:mod:`Build System API docs <spack.build_systems>`
|
||||
provide a list of build systems and methods/attributes that can be
|
||||
overridden. If you are curious about the implementation of a particular
|
||||
build system, you can view the source code by running:
|
||||
@@ -90,7 +90,7 @@ packages. You can quickly find examples by running:
|
||||
You can then view these packages with ``spack edit``.
|
||||
|
||||
This guide is intended to supplement the
|
||||
:py:mod:`Build System API docs <spack_repo.builtin.build_systems>` with examples of
|
||||
:py:mod:`Build System API docs <spack.build_systems>` with examples of
|
||||
how to override commonly used methods. It also provides rules of thumb
|
||||
and suggestions for package developers who are unfamiliar with a
|
||||
particular build system.
|
||||
|
||||
@@ -129,8 +129,8 @@ Adding flags to cmake
|
||||
To add additional flags to the ``cmake`` call, simply override the
|
||||
``cmake_args`` function. The following example defines values for the flags
|
||||
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
|
||||
and without the :meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define` and
|
||||
:meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
|
||||
and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
|
||||
:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
|
||||
@@ -36,7 +36,6 @@
|
||||
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
|
||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
|
||||
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
|
||||
sys.path.append(os.path.abspath("_spack_root/var/spack/repos/"))
|
||||
|
||||
# Add the Spack bin directory to the path so that we can use its output in docs.
|
||||
os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")
|
||||
@@ -76,20 +75,11 @@
|
||||
apidoc_args
|
||||
+ [
|
||||
"_spack_root/lib/spack/spack",
|
||||
"_spack_root/lib/spack/spack/package.py", # sphinx struggles with os.chdir re-export.
|
||||
"_spack_root/lib/spack/spack/test/*.py",
|
||||
"_spack_root/lib/spack/spack/test/cmd/*.py",
|
||||
]
|
||||
)
|
||||
sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
|
||||
sphinx_apidoc(
|
||||
apidoc_args
|
||||
+ [
|
||||
"--implicit-namespaces",
|
||||
"_spack_root/var/spack/repos/spack_repo",
|
||||
"_spack_root/var/spack/repos/spack_repo/builtin/packages",
|
||||
]
|
||||
)
|
||||
|
||||
# Enable todo items
|
||||
todo_include_todos = True
|
||||
@@ -218,7 +208,7 @@ def setup(sphinx):
|
||||
# Spack classes that are private and we don't want to expose
|
||||
("py:class", "spack.provider_index._IndexBase"),
|
||||
("py:class", "spack.repo._PrependFileLoader"),
|
||||
("py:class", "spack_repo.builtin.build_systems._checks.BuilderWithDefaults"),
|
||||
("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
|
||||
# Spack classes that intersphinx is unable to resolve
|
||||
("py:class", "spack.version.StandardVersion"),
|
||||
("py:class", "spack.spec.DependencySpec"),
|
||||
|
||||
@@ -103,7 +103,6 @@ or refer to the full manual below.
|
||||
:caption: API Docs
|
||||
|
||||
Spack API Docs <spack>
|
||||
Spack Builtin Repo <spack_repo>
|
||||
LLNL API Docs <llnl>
|
||||
|
||||
==================
|
||||
|
||||
@@ -69,7 +69,7 @@ An example for ``CMake`` is, for instance:
|
||||
|
||||
The predefined steps for each build system are called "phases".
|
||||
In general, the name and order in which the phases will be executed can be
|
||||
obtained by either reading the API docs at :py:mod:`~.spack_repo.builtin.build_systems`, or
|
||||
obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
|
||||
using the ``spack info`` command:
|
||||
|
||||
.. code-block:: console
|
||||
@@ -158,7 +158,7 @@ builder class explicitly. Using the same example as above, this reads:
|
||||
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
||||
return url_fmt.format(version)
|
||||
|
||||
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
|
||||
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||
def cmake_args(self):
|
||||
args = [
|
||||
self.define_from_variant("BUILD_CODEC", "codec"),
|
||||
@@ -256,7 +256,7 @@ for details):
|
||||
#
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# ----------------------------------------------------------------------------
|
||||
import spack_repo.builtin.build_systems.autotools
|
||||
import spack.build_systems.autotools
|
||||
from spack.package import *
|
||||
|
||||
|
||||
@@ -3697,60 +3697,60 @@ the build system. The build systems currently supported by Spack are:
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| **API docs** | **Description** |
|
||||
+==========================================================+==================================+
|
||||
| :class:`~spack_repo.builtin.build_systems.generic` | Generic build system without any |
|
||||
| :class:`~spack.build_systems.generic` | Generic build system without any |
|
||||
| | base implementation |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.makefile` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.makefile` | Specialized build system for |
|
||||
| | software built invoking |
|
||||
| | hand-written Makefiles |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.autotools` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.autotools` | Specialized build system for |
|
||||
| | software built using |
|
||||
| | GNU Autotools |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.cmake` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.cmake` | Specialized build system for |
|
||||
| | software built using CMake |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.maven` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.maven` | Specialized build system for |
|
||||
| | software built using Maven |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.meson` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.meson` | Specialized build system for |
|
||||
| | software built using Meson |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.nmake` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.nmake` | Specialized build system for |
|
||||
| | software built using NMake |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.qmake` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.qmake` | Specialized build system for |
|
||||
| | software built using QMake |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.scons` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.scons` | Specialized build system for |
|
||||
| | software built using SCons |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.waf` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.waf` | Specialized build system for |
|
||||
| | software built using Waf |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.r` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.r` | Specialized build system for |
|
||||
| | R extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.octave` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.octave` | Specialized build system for |
|
||||
| | Octave packages |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.python` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.python` | Specialized build system for |
|
||||
| | Python extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.perl` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.perl` | Specialized build system for |
|
||||
| | Perl extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.ruby` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.ruby` | Specialized build system for |
|
||||
| | Ruby extensions |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.intel` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.intel` | Specialized build system for |
|
||||
| | licensed Intel software |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.oneapi` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.oneapi` | Specialized build system for |
|
||||
| | Intel oneAPI software |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.aspell_dict` | Specialized build system for |
|
||||
| :class:`~spack.build_systems.aspell_dict` | Specialized build system for |
|
||||
| | Aspell dictionaries |
|
||||
+----------------------------------------------------------+----------------------------------+
|
||||
|
||||
@@ -3762,7 +3762,7 @@ the build system. The build systems currently supported by Spack are:
|
||||
rare cases where manual intervention is needed we need to stress that a
|
||||
package base class depends on the *build system* being used, not the language of the package.
|
||||
For example, a Python extension installed with CMake would ``extends("python")`` and
|
||||
subclass from :class:`~spack_repo.builtin.build_systems.cmake.CMakePackage`.
|
||||
subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Overriding builder methods
|
||||
@@ -3770,7 +3770,7 @@ Overriding builder methods
|
||||
|
||||
Build-system "phases" have default implementations that fit most of the common cases:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/build_systems/autotools.py
|
||||
.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
|
||||
:pyobject: AutotoolsBuilder.configure
|
||||
:linenos:
|
||||
|
||||
@@ -3784,7 +3784,7 @@ configure arguments:
|
||||
|
||||
Each specific build system has a list of attributes and methods that can be overridden to
|
||||
fine-tune the installation of a package without overriding an entire phase. To
|
||||
have more information on them the place to go is the API docs of the :py:mod:`~.spack_repo.builtin.build_systems`
|
||||
have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems`
|
||||
module.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -3826,7 +3826,7 @@ If the ``package.py`` has build instructions in a separate
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
|
||||
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||
def install(self, pkg, spec, prefix):
|
||||
...
|
||||
|
||||
@@ -3839,32 +3839,31 @@ Mixin base classes
|
||||
Besides build systems, there are other cases where common metadata and behavior can be extracted
|
||||
and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share
|
||||
common dependencies and constraints. To factor these attributes into a single place, Spack provides
|
||||
a few mixin classes in the ``spack_repo.builtin.build_systems`` module:
|
||||
a few mixin classes in the ``spack.build_systems`` module:
|
||||
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| **API docs** | **Description** |
|
||||
+============================================================================+==================================+
|
||||
| :class:`~spack_repo.builtin.build_systems.cuda.CudaPackage` | A helper class for packages that |
|
||||
| | use CUDA |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.rocm.ROCmPackage` | A helper class for packages that |
|
||||
| | use ROCm |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
|
||||
| | |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.python.PythonExtension` | A helper class for Python |
|
||||
| | extensions |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
|
||||
| | from sourceforge.org |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
|
||||
| | from sourceware.org |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack_repo.builtin.build_systems.xorg.XorgPackage` | A helper class for x.org |
|
||||
| | packages |
|
||||
+----------------------------------------------------------------------------+----------------------------------+
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| **API docs** | **Description** |
|
||||
+===============================================================+==================================+
|
||||
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
|
||||
| | use CUDA |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
|
||||
| | use ROCm |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python |
|
||||
| | extensions |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
|
||||
| | from sourceforge.org |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
|
||||
| | from sourceware.org |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
| :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org |
|
||||
| | packages |
|
||||
+---------------------------------------------------------------+----------------------------------+
|
||||
|
||||
These classes should be used by adding them to the inheritance tree of the package that needs them,
|
||||
for instance:
|
||||
@@ -3908,13 +3907,13 @@ Additional build instructions are split into separate builder classes:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
|
||||
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||
def cmake_args(self):
|
||||
return [
|
||||
self.define_from_variant("MY_FEATURE", "my_feature")
|
||||
]
|
||||
|
||||
class AutotoolsBuilder(spack_repo.builtin.build_systems.autotools.AutotoolsBuilder):
|
||||
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||
def configure_args(self):
|
||||
return self.with_or_without("my-feature", variant="my_feature")
|
||||
|
||||
|
||||
@@ -49,19 +49,10 @@ class CudaPackage(PackageBase):
|
||||
"90a",
|
||||
"100",
|
||||
"100a",
|
||||
"100f",
|
||||
"101",
|
||||
"101a",
|
||||
"101f",
|
||||
"103",
|
||||
"103a",
|
||||
"103f",
|
||||
"120",
|
||||
"120a",
|
||||
"120f",
|
||||
"121",
|
||||
"121a",
|
||||
"121f",
|
||||
)
|
||||
|
||||
# FIXME: keep cuda and cuda_arch separate to make usage easier until
|
||||
@@ -164,15 +155,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
depends_on("cuda@12.8:", when="cuda_arch=101a")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=120")
|
||||
depends_on("cuda@12.8:", when="cuda_arch=120a")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=100f")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=101f")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=120f")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=103")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=103a")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=103f")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=121")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=121a")
|
||||
depends_on("cuda@12.9:", when="cuda_arch=121f")
|
||||
# From the NVIDIA install guide we know of conflicts for particular
|
||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
||||
@@ -204,7 +186,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
|
||||
conflicts("%gcc@15:", when="+cuda ^cuda@:12.9")
|
||||
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
|
||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||
@@ -213,7 +195,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
|
||||
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
|
||||
conflicts("%clang@20:", when="+cuda ^cuda@:12.9")
|
||||
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||
@@ -24,10 +24,6 @@ class MSBuildPackage(spack.package_base.PackageBase):
|
||||
build_system("msbuild")
|
||||
conflicts("platform=linux", when="build_system=msbuild")
|
||||
conflicts("platform=darwin", when="build_system=msbuild")
|
||||
conflicts("platform=freebsd", when="build_system=msbuild")
|
||||
|
||||
def define(self, msbuild_arg, value):
|
||||
return define(msbuild_arg, value)
|
||||
|
||||
|
||||
@spack.builder.builder("msbuild")
|
||||
@@ -91,7 +87,7 @@ def define_targets(self, *targets):
|
||||
return "/target:" + ";".join(targets) if targets else ""
|
||||
|
||||
def define(self, msbuild_arg, value):
|
||||
return define(msbuild_arg, value)
|
||||
return "/p:{}={}".format(msbuild_arg, value)
|
||||
|
||||
def msbuild_args(self):
|
||||
"""Define build arguments to MSbuild. This is an empty list by default.
|
||||
@@ -125,7 +121,3 @@ def install(
|
||||
pkg.module.msbuild(
|
||||
*self.msbuild_install_args(), self.define_targets(*self.install_targets)
|
||||
)
|
||||
|
||||
|
||||
def define(msbuild_arg, value):
|
||||
return "/p:{}={}".format(msbuild_arg, value)
|
||||
@@ -9,7 +9,7 @@
|
||||
import re
|
||||
import sys
|
||||
from collections import Counter
|
||||
from typing import Generator, List, Optional, Sequence, Union
|
||||
from typing import List, Optional, Union
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.tty as tty
|
||||
@@ -704,67 +704,6 @@ def first_line(docstring):
|
||||
return docstring.split("\n")[0]
|
||||
|
||||
|
||||
def group_arguments(
|
||||
args: Sequence[str],
|
||||
*,
|
||||
max_group_size: int = 500,
|
||||
prefix_length: int = 0,
|
||||
max_group_length: Optional[int] = None,
|
||||
) -> Generator[List[str], None, None]:
|
||||
"""Splits the supplied list of arguments into groups for passing to CLI tools.
|
||||
|
||||
When passing CLI arguments, we need to ensure that argument lists are no longer than
|
||||
the system command line size limit, and we may also need to ensure that groups are
|
||||
no more than some number of arguments long.
|
||||
|
||||
This returns an iterator over lists of arguments that meet these constraints.
|
||||
Arguments are in the same order they appeared in the original argument list.
|
||||
|
||||
If any argument's length is greater than the max_group_length, this will raise a
|
||||
``ValueError``.
|
||||
|
||||
Arguments:
|
||||
args: list of arguments to split into groups
|
||||
max_group_size: max number of elements in any group (default 500)
|
||||
prefix_length: length of any additional arguments (including spaces) to be passed before
|
||||
the groups from args; default is 0 characters
|
||||
max_group_length: max length of characters that if a group of args is joined by " "
|
||||
On unix, ths defaults to SC_ARG_MAX from sysconf. On Windows the default is
|
||||
the max usable for CreateProcess (32,768 chars)
|
||||
|
||||
"""
|
||||
if max_group_length is None:
|
||||
max_group_length = 32768 # default to the Windows limit
|
||||
if hasattr(os, "sysconf"): # sysconf is only on unix
|
||||
try:
|
||||
sysconf_max = os.sysconf("SC_ARG_MAX")
|
||||
if sysconf_max != -1: # returns -1 if an option isn't present
|
||||
max_group_length = sysconf_max
|
||||
except (ValueError, OSError):
|
||||
pass # keep windows default if SC_ARG_MAX isn't in sysconf_names
|
||||
|
||||
group: List[str] = []
|
||||
grouplen, space = prefix_length, 0
|
||||
for arg in args:
|
||||
arglen = len(arg)
|
||||
if arglen > max_group_length:
|
||||
raise ValueError(f"Argument is longer than max command line size: '{arg}'")
|
||||
if arglen + prefix_length > max_group_length:
|
||||
raise ValueError(f"Argument with prefix is longer than max command line size: '{arg}'")
|
||||
|
||||
next_grouplen = grouplen + arglen + space
|
||||
if len(group) == max_group_size or next_grouplen > max_group_length:
|
||||
yield group
|
||||
group, grouplen, space = [], prefix_length, 0
|
||||
|
||||
group.append(arg)
|
||||
grouplen += arglen + space
|
||||
space = 1 # add a space for elements 1, 2, etc. but not 0
|
||||
|
||||
if group:
|
||||
yield group
|
||||
|
||||
|
||||
class CommandNotFoundError(spack.error.SpackError):
|
||||
"""Exception class thrown when a requested command is not recognized as
|
||||
such.
|
||||
|
||||
@@ -52,7 +52,6 @@
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
{package_class_import}
|
||||
from spack.package import *
|
||||
|
||||
|
||||
@@ -86,7 +85,6 @@ class BundlePackageTemplate:
|
||||
"""
|
||||
|
||||
base_class_name = "BundlePackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.bundle import BundlePackage"
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required.
|
||||
@@ -116,7 +114,6 @@ def write(self, pkg_path):
|
||||
name=self.name,
|
||||
class_name=self.class_name,
|
||||
base_class_name=self.base_class_name,
|
||||
package_class_import=self.package_class_import,
|
||||
url_def=self.url_def,
|
||||
versions=self.versions,
|
||||
dependencies="\n".join(all_deps),
|
||||
@@ -129,7 +126,6 @@ class PackageTemplate(BundlePackageTemplate):
|
||||
"""Provides the default values to be used for the package file template"""
|
||||
|
||||
base_class_name = "Package"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.generic import Package"
|
||||
|
||||
body_def = """\
|
||||
def install(self, spec, prefix):
|
||||
@@ -150,9 +146,6 @@ class AutotoolsPackageTemplate(PackageTemplate):
|
||||
that *do* come with a ``configure`` script"""
|
||||
|
||||
base_class_name = "AutotoolsPackage"
|
||||
package_class_import = (
|
||||
"from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
|
||||
)
|
||||
|
||||
body_def = """\
|
||||
def configure_args(self):
|
||||
@@ -167,9 +160,6 @@ class AutoreconfPackageTemplate(PackageTemplate):
|
||||
that *do not* come with a ``configure`` script"""
|
||||
|
||||
base_class_name = "AutotoolsPackage"
|
||||
package_class_import = (
|
||||
"from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
|
||||
)
|
||||
|
||||
dependencies = """\
|
||||
depends_on("autoconf", type="build")
|
||||
@@ -196,7 +186,6 @@ class CargoPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for cargo-based packages"""
|
||||
|
||||
base_class_name = "CargoPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.cargo import CargoPackage"
|
||||
|
||||
body_def = ""
|
||||
|
||||
@@ -205,7 +194,6 @@ class CMakePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for CMake-based packages"""
|
||||
|
||||
base_class_name = "CMakePackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.cmake import CMakePackage"
|
||||
|
||||
body_def = """\
|
||||
def cmake_args(self):
|
||||
@@ -220,7 +208,6 @@ class GoPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Go-module-based packages"""
|
||||
|
||||
base_class_name = "GoPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.go import GoPackage"
|
||||
|
||||
body_def = ""
|
||||
|
||||
@@ -229,7 +216,6 @@ class LuaPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for LuaRocks-based packages"""
|
||||
|
||||
base_class_name = "LuaPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.lua import LuaPackage"
|
||||
|
||||
body_def = """\
|
||||
def luarocks_args(self):
|
||||
@@ -251,7 +237,6 @@ class MesonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for meson-based packages"""
|
||||
|
||||
base_class_name = "MesonPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.meson import MesonPackage"
|
||||
|
||||
body_def = """\
|
||||
def meson_args(self):
|
||||
@@ -264,7 +249,6 @@ class QMakePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for QMake-based packages"""
|
||||
|
||||
base_class_name = "QMakePackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.qmake import QMakePackage"
|
||||
|
||||
body_def = """\
|
||||
def qmake_args(self):
|
||||
@@ -277,7 +261,6 @@ class MavenPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Maven-based packages"""
|
||||
|
||||
base_class_name = "MavenPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.maven import MavenPackage"
|
||||
|
||||
body_def = """\
|
||||
def build(self, spec, prefix):
|
||||
@@ -289,7 +272,6 @@ class SconsPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for SCons-based packages"""
|
||||
|
||||
base_class_name = "SConsPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.scons import SConsPackage"
|
||||
|
||||
body_def = """\
|
||||
def build_args(self, spec, prefix):
|
||||
@@ -303,7 +285,6 @@ class WafPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate override for Waf-based packages"""
|
||||
|
||||
base_class_name = "WafPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.waf import WafPackage"
|
||||
|
||||
body_def = """\
|
||||
# FIXME: Override configure_args(), build_args(),
|
||||
@@ -327,7 +308,6 @@ class RacketPackageTemplate(PackageTemplate):
|
||||
"""Provides approriate overrides for Racket extensions"""
|
||||
|
||||
base_class_name = "RacketPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.racket import RacketPackage"
|
||||
|
||||
url_line = """\
|
||||
# FIXME: set the proper location from which to fetch your package
|
||||
@@ -365,7 +345,6 @@ class PythonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for python extensions"""
|
||||
|
||||
base_class_name = "PythonPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.python import PythonPackage"
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Only add the python/pip/wheel dependencies if you need specific versions
|
||||
@@ -453,7 +432,6 @@ class RPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for R extensions"""
|
||||
|
||||
base_class_name = "RPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.r import RPackage"
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required.
|
||||
@@ -494,7 +472,6 @@ class PerlmakePackageTemplate(PackageTemplate):
|
||||
that come with a Makefile.PL"""
|
||||
|
||||
base_class_name = "PerlPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.perl import PerlPackage"
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required:
|
||||
@@ -532,7 +509,6 @@ class OctavePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for octave packages"""
|
||||
|
||||
base_class_name = "OctavePackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.octave import OctavePackage"
|
||||
|
||||
dependencies = """\
|
||||
extends("octave")
|
||||
@@ -555,7 +531,6 @@ class RubyPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Ruby packages"""
|
||||
|
||||
base_class_name = "RubyPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.ruby import RubyPackage"
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required. Only add the ruby dependency
|
||||
@@ -584,7 +559,6 @@ class MakefilePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Makefile packages"""
|
||||
|
||||
base_class_name = "MakefilePackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.makefile import MakefilePackage"
|
||||
|
||||
body_def = """\
|
||||
def edit(self, spec, prefix):
|
||||
@@ -599,7 +573,6 @@ class IntelPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for licensed Intel software"""
|
||||
|
||||
base_class_name = "IntelOneApiPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.oneapi import IntelOneApiPackage"
|
||||
|
||||
body_def = """\
|
||||
# FIXME: Override `setup_environment` if necessary."""
|
||||
@@ -609,7 +582,6 @@ class SIPPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for SIP packages."""
|
||||
|
||||
base_class_name = "SIPPackage"
|
||||
package_class_import = "from spack_repo.builtin.build_systems.sip import SIPPackage"
|
||||
|
||||
body_def = """\
|
||||
def configure_args(self, spec, prefix):
|
||||
|
||||
@@ -28,7 +28,7 @@ def setup_parser(subparser):
|
||||
"--build-system",
|
||||
dest="path",
|
||||
action="store_const",
|
||||
const=os.path.join(spack.repo.PATH.repos[0].root, "build_systems"),
|
||||
const=spack.paths.build_systems_path,
|
||||
help="edit the build system with the supplied name",
|
||||
)
|
||||
excl_args.add_argument(
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import itertools
|
||||
import os
|
||||
import sys
|
||||
|
||||
@@ -181,23 +182,21 @@ def pkg_grep(args, unknown_args):
|
||||
if "GNU" in grep("--version", output=str):
|
||||
grep.add_default_arg("--color=auto")
|
||||
|
||||
all_paths = spack.repo.PATH.all_package_paths()
|
||||
if not all_paths:
|
||||
return 0 # no packages to search
|
||||
|
||||
# these args start every command invocation (grep arg1 arg2 ...)
|
||||
all_prefix_args = grep.exe + args.grep_args + unknown_args
|
||||
prefix_length = sum(len(arg) for arg in all_prefix_args) + len(all_prefix_args)
|
||||
# determines number of files to grep at a time
|
||||
grouper = lambda e: e[0] // 500
|
||||
|
||||
# set up iterator and save the first group to ensure we don't end up with a group of size 1
|
||||
groups = spack.cmd.group_arguments(all_paths, prefix_length=prefix_length)
|
||||
groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
|
||||
if not groups:
|
||||
return 0 # no packages to search
|
||||
|
||||
# You can force GNU grep to show filenames on every line with -H, but not POSIX grep.
|
||||
# POSIX grep only shows filenames when you're grepping 2 or more files. Since we
|
||||
# don't know which one we're running, we ensure there are always >= 2 files by
|
||||
# saving the prior group of paths and adding it to a straggling group of 1 if needed.
|
||||
# This works unless somehow there is only one package in all of Spack.
|
||||
prior_paths = next(groups)
|
||||
_, first_group = next(groups)
|
||||
prior_paths = [path for _, path in first_group]
|
||||
|
||||
# grep returns 1 for nothing found, 0 for something found, and > 1 for error
|
||||
return_code = 1
|
||||
@@ -208,7 +207,9 @@ def grep_group(paths):
|
||||
grep(*all_args, fail_on_error=False)
|
||||
return grep.returncode
|
||||
|
||||
for paths in groups:
|
||||
for _, group in groups:
|
||||
paths = [path for _, path in group] # extract current path group
|
||||
|
||||
if len(paths) == 1:
|
||||
# Only the very last group can have length 1. If it does, combine
|
||||
# it with the prior group to ensure more than one path is grepped.
|
||||
|
||||
@@ -3,13 +3,11 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import sys
|
||||
from typing import Any, List, Optional
|
||||
from typing import List
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.config
|
||||
import spack.repo
|
||||
import spack.util.path
|
||||
@@ -67,15 +65,6 @@ def setup_parser(subparser):
|
||||
help="configuration scope to modify",
|
||||
)
|
||||
|
||||
# Migrate
|
||||
migrate_parser = sp.add_parser("migrate", help=repo_migrate.__doc__)
|
||||
migrate_parser.add_argument(
|
||||
"namespace_or_path", help="path to a Spack package repository directory"
|
||||
)
|
||||
migrate_parser.add_argument(
|
||||
"--fix", action="store_true", help="automatically fix the imports in the package files"
|
||||
)
|
||||
|
||||
|
||||
def repo_create(args):
|
||||
"""create a new package repository"""
|
||||
@@ -166,70 +155,12 @@ def repo_list(args):
|
||||
print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")
|
||||
|
||||
|
||||
def _get_repo(name_or_path: str) -> Optional[spack.repo.Repo]:
|
||||
try:
|
||||
return spack.repo.from_path(name_or_path)
|
||||
except spack.repo.RepoError:
|
||||
pass
|
||||
|
||||
for repo in spack.config.get("repos"):
|
||||
try:
|
||||
r = spack.repo.from_path(repo)
|
||||
except spack.repo.RepoError:
|
||||
continue
|
||||
if r.namespace == name_or_path:
|
||||
return r
|
||||
return None
|
||||
|
||||
|
||||
def repo_migrate(args: Any) -> int:
|
||||
"""migrate a package repository to the latest Package API"""
|
||||
from spack.repo_migrate import migrate_v1_to_v2, migrate_v2_imports
|
||||
|
||||
repo = _get_repo(args.namespace_or_path)
|
||||
|
||||
if repo is None:
|
||||
tty.die(f"No such repository: {args.namespace_or_path}")
|
||||
|
||||
if (1, 0) <= repo.package_api < (2, 0):
|
||||
success, repo_v2 = migrate_v1_to_v2(repo, fix=args.fix)
|
||||
exit_code = 0 if success else 1
|
||||
elif (2, 0) <= repo.package_api < (3, 0):
|
||||
repo_v2 = None
|
||||
exit_code = 0 if migrate_v2_imports(repo.packages_path, repo.root, fix=args.fix) else 1
|
||||
else:
|
||||
repo_v2 = None
|
||||
exit_code = 0
|
||||
|
||||
if exit_code == 0 and isinstance(repo_v2, spack.repo.Repo):
|
||||
tty.info(
|
||||
f"Repository '{repo_v2.namespace}' was successfully migrated from "
|
||||
f"package API {repo.package_api_str} to {repo_v2.package_api_str}."
|
||||
)
|
||||
tty.warn(
|
||||
"Remove the old repository from Spack's configuration and add the new one using:\n"
|
||||
f" spack repo remove {shlex.quote(repo.root)}\n"
|
||||
f" spack repo add {shlex.quote(repo_v2.root)}"
|
||||
)
|
||||
|
||||
elif exit_code == 0:
|
||||
tty.info(f"Repository '{repo.namespace}' was successfully migrated")
|
||||
|
||||
elif not args.fix and exit_code == 1:
|
||||
tty.error(
|
||||
f"No changes were made to the repository {repo.root} with namespace "
|
||||
f"'{repo.namespace}'. Run with --fix to apply the above changes."
|
||||
)
|
||||
|
||||
return exit_code
|
||||
|
||||
|
||||
def repo(parser, args):
|
||||
return {
|
||||
action = {
|
||||
"create": repo_create,
|
||||
"list": repo_list,
|
||||
"add": repo_add,
|
||||
"remove": repo_remove,
|
||||
"rm": repo_remove,
|
||||
"migrate": repo_migrate,
|
||||
}[args.repo_command](args)
|
||||
}
|
||||
action[args.repo_command](args)
|
||||
|
||||
@@ -332,8 +332,18 @@ def process_files(file_list, is_args):
|
||||
|
||||
rewrite_and_print_output(output, args, pat, replacement)
|
||||
|
||||
packages_isort_args = (
|
||||
"--rm",
|
||||
"spack.pkgkit",
|
||||
"--rm",
|
||||
"spack.package_defs",
|
||||
"-a",
|
||||
"from spack.package import *",
|
||||
)
|
||||
packages_isort_args = packages_isort_args + isort_args
|
||||
|
||||
# packages
|
||||
process_files(filter(is_package, file_list), isort_args)
|
||||
process_files(filter(is_package, file_list), packages_isort_args)
|
||||
# non-packages
|
||||
process_files(filter(lambda f: not is_package(f), file_list), isort_args)
|
||||
|
||||
|
||||
@@ -60,6 +60,7 @@
|
||||
import spack.schema.modules
|
||||
import spack.schema.packages
|
||||
import spack.schema.repos
|
||||
import spack.schema.toolchains
|
||||
import spack.schema.upstreams
|
||||
import spack.schema.view
|
||||
import spack.util.remote_file_cache as rfc_util
|
||||
@@ -87,6 +88,7 @@
|
||||
"bootstrap": spack.schema.bootstrap.schema,
|
||||
"ci": spack.schema.ci.schema,
|
||||
"cdash": spack.schema.cdash.schema,
|
||||
"toolchains": spack.schema.toolchains.schema,
|
||||
}
|
||||
|
||||
# Same as above, but including keys for environments
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import errno
|
||||
import glob
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
@@ -2425,11 +2424,19 @@ def display_specs(specs):
|
||||
|
||||
def make_repo_path(root):
|
||||
"""Make a RepoPath from the repo subdirectories in an environment."""
|
||||
repos = [
|
||||
spack.repo.from_path(os.path.dirname(p))
|
||||
for p in glob.glob(os.path.join(root, "**", "repo.yaml"), recursive=True)
|
||||
]
|
||||
return spack.repo.RepoPath(*repos, cache=spack.caches.MISC_CACHE)
|
||||
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
|
||||
|
||||
if os.path.isdir(root):
|
||||
for repo_root in os.listdir(root):
|
||||
repo_root = os.path.join(root, repo_root)
|
||||
|
||||
if not os.path.isdir(repo_root):
|
||||
continue
|
||||
|
||||
repo = spack.repo.from_path(repo_root)
|
||||
path.put_last(repo)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def manifest_file(env_name_or_dir):
|
||||
|
||||
@@ -1090,12 +1090,13 @@ def _handle_solver_bug(
|
||||
stream=out,
|
||||
)
|
||||
if wrong_output:
|
||||
msg = (
|
||||
"internal solver error: the following specs were concretized, but do not satisfy the "
|
||||
"input:\n - "
|
||||
+ "\n - ".join(str(s) for s, _ in wrong_output)
|
||||
+ "\n Please report a bug at https://github.com/spack/spack/issues"
|
||||
)
|
||||
msg = "internal solver error: the following specs were concretized, but do not satisfy "
|
||||
msg += "the input:\n"
|
||||
for in_spec, out_spec in wrong_output:
|
||||
msg += f" - input: {in_spec}\n"
|
||||
msg += f" output: {out_spec.long_spec}\n"
|
||||
msg += "\n Please report a bug at https://github.com/spack/spack/issues"
|
||||
|
||||
# try to write the input/output specs to a temporary directory for bug reports
|
||||
try:
|
||||
tmpdir = tempfile.mkdtemp(prefix="spack-asp-", dir=root)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
# flake8: noqa: F401, E402
|
||||
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
|
||||
other modules. Packages should import this module, instead of importing from spack.* directly
|
||||
to ensure forward compatibility with future versions of Spack."""
|
||||
@@ -12,6 +13,17 @@
|
||||
# import most common types used in packages
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
|
||||
class tty:
|
||||
import llnl.util.tty as _tty
|
||||
|
||||
debug = _tty.debug
|
||||
error = _tty.error
|
||||
info = _tty.info
|
||||
msg = _tty.msg
|
||||
warn = _tty.warn
|
||||
|
||||
|
||||
from llnl.util.filesystem import (
|
||||
FileFilter,
|
||||
FileList,
|
||||
@@ -49,6 +61,51 @@
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
from spack.build_environment import MakeExecutable
|
||||
from spack.build_systems.aspell_dict import AspellDictPackage
|
||||
from spack.build_systems.autotools import AutotoolsPackage
|
||||
from spack.build_systems.bundle import BundlePackage
|
||||
from spack.build_systems.cached_cmake import (
|
||||
CachedCMakePackage,
|
||||
cmake_cache_filepath,
|
||||
cmake_cache_option,
|
||||
cmake_cache_path,
|
||||
cmake_cache_string,
|
||||
)
|
||||
from spack.build_systems.cargo import CargoPackage
|
||||
from spack.build_systems.cmake import CMakePackage, generator
|
||||
from spack.build_systems.compiler import CompilerPackage
|
||||
from spack.build_systems.cuda import CudaPackage
|
||||
from spack.build_systems.generic import Package
|
||||
from spack.build_systems.gnu import GNUMirrorPackage
|
||||
from spack.build_systems.go import GoPackage
|
||||
from spack.build_systems.intel import IntelPackage
|
||||
from spack.build_systems.lua import LuaPackage
|
||||
from spack.build_systems.makefile import MakefilePackage
|
||||
from spack.build_systems.maven import MavenPackage
|
||||
from spack.build_systems.meson import MesonPackage
|
||||
from spack.build_systems.msbuild import MSBuildPackage
|
||||
from spack.build_systems.nmake import NMakePackage
|
||||
from spack.build_systems.octave import OctavePackage
|
||||
from spack.build_systems.oneapi import (
|
||||
INTEL_MATH_LIBRARIES,
|
||||
IntelOneApiLibraryPackage,
|
||||
IntelOneApiLibraryPackageWithSdk,
|
||||
IntelOneApiPackage,
|
||||
IntelOneApiStaticLibraryList,
|
||||
)
|
||||
from spack.build_systems.perl import PerlPackage
|
||||
from spack.build_systems.python import PythonExtension, PythonPackage
|
||||
from spack.build_systems.qmake import QMakePackage
|
||||
from spack.build_systems.r import RPackage
|
||||
from spack.build_systems.racket import RacketPackage
|
||||
from spack.build_systems.rocm import ROCmPackage
|
||||
from spack.build_systems.ruby import RubyPackage
|
||||
from spack.build_systems.scons import SConsPackage
|
||||
from spack.build_systems.sip import SIPPackage
|
||||
from spack.build_systems.sourceforge import SourceforgePackage
|
||||
from spack.build_systems.sourceware import SourcewarePackage
|
||||
from spack.build_systems.waf import WafPackage
|
||||
from spack.build_systems.xorg import XorgPackage
|
||||
from spack.builder import BaseBuilder
|
||||
from spack.config import determine_number_of_jobs
|
||||
from spack.deptypes import ALL_TYPES as all_deptypes
|
||||
@@ -101,123 +158,6 @@
|
||||
cd = chdir
|
||||
pwd = getcwd
|
||||
|
||||
|
||||
class tty:
|
||||
import llnl.util.tty as _tty
|
||||
|
||||
debug = _tty.debug
|
||||
error = _tty.error
|
||||
info = _tty.info
|
||||
msg = _tty.msg
|
||||
warn = _tty.warn
|
||||
|
||||
|
||||
__all__ = [
|
||||
"chdir",
|
||||
"environ",
|
||||
"getcwd",
|
||||
"makedirs",
|
||||
"mkdir",
|
||||
"remove",
|
||||
"removedirs",
|
||||
"move",
|
||||
"rmtree",
|
||||
"Dict",
|
||||
"List",
|
||||
"Optional",
|
||||
"FileFilter",
|
||||
"FileList",
|
||||
"HeaderList",
|
||||
"LibraryList",
|
||||
"ancestor",
|
||||
"can_access",
|
||||
"change_sed_delimiter",
|
||||
"copy",
|
||||
"copy_tree",
|
||||
"filter_file",
|
||||
"find",
|
||||
"find_all_headers",
|
||||
"find_first",
|
||||
"find_headers",
|
||||
"find_libraries",
|
||||
"find_system_libraries",
|
||||
"force_remove",
|
||||
"force_symlink",
|
||||
"install",
|
||||
"install_tree",
|
||||
"is_exe",
|
||||
"join_path",
|
||||
"keep_modification_time",
|
||||
"library_extensions",
|
||||
"mkdirp",
|
||||
"remove_directory_contents",
|
||||
"remove_linked_tree",
|
||||
"rename",
|
||||
"set_executable",
|
||||
"set_install_permissions",
|
||||
"touch",
|
||||
"working_dir",
|
||||
"symlink",
|
||||
"MakeExecutable",
|
||||
"BaseBuilder",
|
||||
"determine_number_of_jobs",
|
||||
"all_deptypes",
|
||||
"build_system",
|
||||
"can_splice",
|
||||
"conditional",
|
||||
"conflicts",
|
||||
"depends_on",
|
||||
"extends",
|
||||
"license",
|
||||
"maintainers",
|
||||
"patch",
|
||||
"provides",
|
||||
"redistribute",
|
||||
"requires",
|
||||
"resource",
|
||||
"variant",
|
||||
"version",
|
||||
"InstallError",
|
||||
"NoHeadersError",
|
||||
"NoLibrariesError",
|
||||
"SkipTest",
|
||||
"cache_extra_test_sources",
|
||||
"check_outputs",
|
||||
"find_required_file",
|
||||
"get_escaped_text_output",
|
||||
"install_test_root",
|
||||
"test_part",
|
||||
"filter_compiler_wrappers",
|
||||
"default_args",
|
||||
"when",
|
||||
"build_system_flags",
|
||||
"env_flags",
|
||||
"inject_flags",
|
||||
"on_package_attributes",
|
||||
"bash_completion_path",
|
||||
"fish_completion_path",
|
||||
"zsh_completion_path",
|
||||
"run_after",
|
||||
"run_before",
|
||||
"Spec",
|
||||
"EnvironmentModifications",
|
||||
"Executable",
|
||||
"ProcessError",
|
||||
"which",
|
||||
"which_string",
|
||||
"fix_darwin_install_name",
|
||||
"Prefix",
|
||||
"any_combination_of",
|
||||
"auto_or_any_combination_of",
|
||||
"disjoint_sets",
|
||||
"Version",
|
||||
"ver",
|
||||
"env",
|
||||
"cd",
|
||||
"pwd",
|
||||
"tty",
|
||||
]
|
||||
|
||||
# These are just here for editor support; they may be set when the build env is set up.
|
||||
configure: Executable
|
||||
make_jobs: int
|
||||
|
||||
@@ -583,7 +583,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
|
||||
like ``homepage`` and, for a code-based package, ``url``, or functions
|
||||
such as ``install()``.
|
||||
There are many custom ``Package`` subclasses in the
|
||||
``spack_repo.builtin.build_systems`` package that make things even easier for
|
||||
``spack.build_systems`` package that make things even easier for
|
||||
specific build systems.
|
||||
|
||||
"""
|
||||
|
||||
@@ -58,7 +58,7 @@
|
||||
repos_path = os.path.join(var_path, "repos")
|
||||
test_repos_path = os.path.join(var_path, "test_repos")
|
||||
packages_path = os.path.join(repos_path, "spack_repo", "builtin")
|
||||
mock_packages_path = os.path.join(test_repos_path, "spack_repo", "builtin_mock")
|
||||
mock_packages_path = os.path.join(test_repos_path, "builtin.mock")
|
||||
|
||||
#
|
||||
# Writable things in $spack/var/spack
|
||||
|
||||
@@ -79,25 +79,6 @@ def namespace_from_fullname(fullname: str) -> str:
|
||||
return fullname
|
||||
|
||||
|
||||
class _PrependFileLoader(importlib.machinery.SourceFileLoader):
|
||||
def __init__(self, fullname: str, repo: "Repo", package_name: str) -> None:
|
||||
self.repo = repo
|
||||
self.package_name = package_name
|
||||
path = repo.filename_for_package_name(package_name)
|
||||
self.fullname = fullname
|
||||
self.prepend = b"from spack_repo.builtin.build_systems._package_api_v1 import *\n"
|
||||
super().__init__(self.fullname, path)
|
||||
|
||||
def path_stats(self, path):
|
||||
stats = dict(super().path_stats(path))
|
||||
stats["size"] += len(self.prepend)
|
||||
return stats
|
||||
|
||||
def get_data(self, path):
|
||||
data = super().get_data(path)
|
||||
return self.prepend + data if path == self.path else data
|
||||
|
||||
|
||||
class SpackNamespaceLoader:
|
||||
def create_module(self, spec):
|
||||
return SpackNamespace(spec.name)
|
||||
@@ -144,7 +125,8 @@ def compute_loader(self, fullname: str):
|
||||
# With 2 nested conditionals we can call "repo.real_name" only once
|
||||
package_name = repo.real_name(module_name)
|
||||
if package_name:
|
||||
return _PrependFileLoader(fullname, repo, package_name)
|
||||
module_path = repo.filename_for_package_name(package_name)
|
||||
return importlib.machinery.SourceFileLoader(fullname, module_path)
|
||||
|
||||
# We are importing a full namespace like 'spack.pkg.builtin'
|
||||
if fullname == repo.full_namespace:
|
||||
@@ -173,7 +155,7 @@ def compute_loader(self, fullname: str):
|
||||
def builtin_repo() -> "Repo":
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return PATH.get_repo("builtin_mock")
|
||||
return PATH.get_repo("builtin.mock")
|
||||
except UnknownNamespaceError:
|
||||
return PATH.get_repo("builtin")
|
||||
|
||||
|
||||
@@ -1,429 +0,0 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import ast
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
from typing import IO, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import spack.repo
|
||||
import spack.util.naming
|
||||
import spack.util.spack_yaml
|
||||
|
||||
|
||||
def _same_contents(f: str, g: str) -> bool:
|
||||
"""Return True if the files have the same contents."""
|
||||
try:
|
||||
with open(f, "rb") as f1, open(g, "rb") as f2:
|
||||
while True:
|
||||
b1 = f1.read(4096)
|
||||
b2 = f2.read(4096)
|
||||
if b1 != b2:
|
||||
return False
|
||||
if not b1 and not b2:
|
||||
break
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
def migrate_v1_to_v2(
|
||||
repo: spack.repo.Repo, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
|
||||
) -> Tuple[bool, Optional[spack.repo.Repo]]:
|
||||
"""To upgrade a repo from Package API v1 to v2 we need to:
|
||||
1. ensure ``spack_repo/<namespace>`` parent dirs to the ``repo.yaml`` file.
|
||||
2. rename <pkg dir>/package.py to <pkg module>/package.py.
|
||||
3. bump the version in ``repo.yaml``.
|
||||
"""
|
||||
if not (1, 0) <= repo.package_api < (2, 0):
|
||||
raise RuntimeError(f"Cannot upgrade from {repo.package_api_str} to v2.0")
|
||||
|
||||
with open(os.path.join(repo.root, "repo.yaml"), encoding="utf-8") as f:
|
||||
updated_config = spack.util.spack_yaml.load(f)
|
||||
updated_config["repo"]["api"] = "v2.0"
|
||||
|
||||
namespace = repo.namespace.split(".")
|
||||
|
||||
if not all(
|
||||
spack.util.naming.valid_module_name(part, package_api=(2, 0)) for part in namespace
|
||||
):
|
||||
print(
|
||||
f"Cannot upgrade from v1 to v2, because the namespace '{repo.namespace}' is not a "
|
||||
"valid Python module",
|
||||
file=err,
|
||||
)
|
||||
return False, None
|
||||
|
||||
try:
|
||||
subdirectory = spack.repo._validate_and_normalize_subdir(
|
||||
repo.subdirectory, repo.root, package_api=(2, 0)
|
||||
)
|
||||
except spack.repo.BadRepoError:
|
||||
print(
|
||||
f"Cannot upgrade from v1 to v2, because the subdirectory '{repo.subdirectory}' is not "
|
||||
"a valid Python module",
|
||||
file=err,
|
||||
)
|
||||
return False, None
|
||||
|
||||
new_root = os.path.join(repo.root, "spack_repo", *namespace)
|
||||
|
||||
ino_to_relpath: Dict[int, str] = {}
|
||||
symlink_to_ino: Dict[str, int] = {}
|
||||
|
||||
prefix_len = len(repo.root) + len(os.sep)
|
||||
|
||||
rename: Dict[str, str] = {}
|
||||
dirs_to_create: List[str] = []
|
||||
files_to_copy: List[str] = []
|
||||
|
||||
errors = False
|
||||
|
||||
stack: List[Tuple[str, int]] = [(repo.root, 0)]
|
||||
while stack:
|
||||
path, depth = stack.pop()
|
||||
|
||||
try:
|
||||
entries = os.scandir(path)
|
||||
except OSError:
|
||||
continue
|
||||
|
||||
for entry in entries:
|
||||
rel_path = entry.path[prefix_len:]
|
||||
|
||||
if depth == 0 and entry.name in ("spack_repo", "repo.yaml"):
|
||||
continue
|
||||
|
||||
ino_to_relpath[entry.inode()] = entry.path[prefix_len:]
|
||||
|
||||
if entry.is_symlink():
|
||||
try:
|
||||
symlink_to_ino[rel_path] = entry.stat(follow_symlinks=True).st_ino
|
||||
except OSError:
|
||||
symlink_to_ino[rel_path] = -1 # dangling or no access
|
||||
|
||||
continue
|
||||
|
||||
elif entry.is_dir(follow_symlinks=False):
|
||||
if entry.name == "__pycache__":
|
||||
continue
|
||||
|
||||
# check if this is a package
|
||||
if (
|
||||
depth == 1
|
||||
and rel_path.startswith(f"{subdirectory}{os.sep}")
|
||||
and os.path.exists(os.path.join(entry.path, "package.py"))
|
||||
):
|
||||
if "_" in entry.name:
|
||||
print(
|
||||
f"Invalid package name '{entry.name}': underscores are not allowed in "
|
||||
"package names, rename the package with hyphens as separators",
|
||||
file=err,
|
||||
)
|
||||
errors = True
|
||||
continue
|
||||
pkg_dir = spack.util.naming.pkg_name_to_pkg_dir(entry.name, package_api=(2, 0))
|
||||
if pkg_dir != entry.name:
|
||||
rename[f"{subdirectory}{os.sep}{entry.name}"] = (
|
||||
f"{subdirectory}{os.sep}{pkg_dir}"
|
||||
)
|
||||
|
||||
dirs_to_create.append(rel_path)
|
||||
|
||||
stack.append((entry.path, depth + 1))
|
||||
continue
|
||||
|
||||
files_to_copy.append(rel_path)
|
||||
|
||||
if errors:
|
||||
return False, None
|
||||
|
||||
rename_regex = re.compile("^(" + "|".join(re.escape(k) for k in rename.keys()) + ")")
|
||||
|
||||
if fix:
|
||||
os.makedirs(new_root, exist_ok=True)
|
||||
|
||||
def _relocate(rel_path: str) -> Tuple[str, str]:
|
||||
old = os.path.join(repo.root, rel_path)
|
||||
if rename:
|
||||
new_rel = rename_regex.sub(lambda m: rename[m.group(0)], rel_path)
|
||||
else:
|
||||
new_rel = rel_path
|
||||
new = os.path.join(new_root, new_rel)
|
||||
return old, new
|
||||
|
||||
if not fix:
|
||||
print("The following directories, files and symlinks will be created:\n", file=out)
|
||||
|
||||
for rel_path in dirs_to_create:
|
||||
_, new_path = _relocate(rel_path)
|
||||
if fix:
|
||||
try:
|
||||
os.mkdir(new_path)
|
||||
except FileExistsError: # not an error if the directory already exists
|
||||
continue
|
||||
else:
|
||||
print(f"create directory {new_path}", file=out)
|
||||
|
||||
for rel_path in files_to_copy:
|
||||
old_path, new_path = _relocate(rel_path)
|
||||
if os.path.lexists(new_path):
|
||||
# if we already copied this file, don't error.
|
||||
if not _same_contents(old_path, new_path):
|
||||
print(
|
||||
f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
|
||||
file=err,
|
||||
)
|
||||
return False, None
|
||||
continue
|
||||
if fix:
|
||||
shutil.copy2(old_path, new_path)
|
||||
else:
|
||||
print(f"copy {old_path} -> {new_path}", file=out)
|
||||
|
||||
for rel_path, ino in symlink_to_ino.items():
|
||||
old_path, new_path = _relocate(rel_path)
|
||||
if ino in ino_to_relpath:
|
||||
# link by path relative to the new root
|
||||
_, new_target = _relocate(ino_to_relpath[ino])
|
||||
tgt = os.path.relpath(new_target, new_path)
|
||||
else:
|
||||
tgt = os.path.realpath(old_path)
|
||||
|
||||
# no-op if the same, error if different
|
||||
if os.path.lexists(new_path):
|
||||
if not os.path.islink(new_path) or os.readlink(new_path) != tgt:
|
||||
print(
|
||||
f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
|
||||
file=err,
|
||||
)
|
||||
return False, None
|
||||
continue
|
||||
|
||||
if fix:
|
||||
os.symlink(tgt, new_path)
|
||||
else:
|
||||
print(f"create symlink {new_path} -> {tgt}", file=out)
|
||||
|
||||
if fix:
|
||||
with open(os.path.join(new_root, "repo.yaml"), "w", encoding="utf-8") as f:
|
||||
spack.util.spack_yaml.dump(updated_config, f)
|
||||
updated_repo = spack.repo.from_path(new_root)
|
||||
else:
|
||||
print(file=out)
|
||||
updated_repo = repo # compute the import diff on the v1 repo since v2 doesn't exist yet
|
||||
|
||||
result = migrate_v2_imports(
|
||||
updated_repo.packages_path, updated_repo.root, fix=fix, out=out, err=err
|
||||
)
|
||||
|
||||
return result, (updated_repo if fix else None)
|
||||
|
||||
|
||||
def migrate_v2_imports(
    packages_dir: str, root: str, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> bool:
    """Add the explicit build-system imports required by Package API v2.0.

    In Package API v2.0, packages need to explicitly import package classes and a few other
    symbols from the build_systems module; they are no longer provided by ``spack.package``.
    This function scans every ``<name>/package.py`` under ``packages_dir`` and either rewrites
    the file in place (``fix=True``) or prints a unified diff of the proposed change
    (``fix=False``).

    Args:
        packages_dir: directory containing one subdirectory per package
        root: repository root; diff headers show paths relative to it
        fix: if True, modify files in place; if False, only print what would change
        out: stream for normal output (diffs, progress)
        err: stream for warnings and errors

    Returns:
        True if every package is (now) up to date; False if any package still needs
        fixing or could not be fixed.
    """

    #: Maps each symbol of interest to the v2.0 module that provides it.
    symbol_to_module = {
        "AspellDictPackage": "spack_repo.builtin.build_systems.aspell_dict",
        "AutotoolsPackage": "spack_repo.builtin.build_systems.autotools",
        "BundlePackage": "spack_repo.builtin.build_systems.bundle",
        "CachedCMakePackage": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_filepath": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_option": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_path": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_string": "spack_repo.builtin.build_systems.cached_cmake",
        "CargoPackage": "spack_repo.builtin.build_systems.cargo",
        "CMakePackage": "spack_repo.builtin.build_systems.cmake",
        "generator": "spack_repo.builtin.build_systems.cmake",
        "CompilerPackage": "spack_repo.builtin.build_systems.compiler",
        "CudaPackage": "spack_repo.builtin.build_systems.cuda",
        "Package": "spack_repo.builtin.build_systems.generic",
        "GNUMirrorPackage": "spack_repo.builtin.build_systems.gnu",
        "GoPackage": "spack_repo.builtin.build_systems.go",
        "IntelPackage": "spack_repo.builtin.build_systems.intel",
        "LuaPackage": "spack_repo.builtin.build_systems.lua",
        "MakefilePackage": "spack_repo.builtin.build_systems.makefile",
        "MavenPackage": "spack_repo.builtin.build_systems.maven",
        "MesonPackage": "spack_repo.builtin.build_systems.meson",
        "MSBuildPackage": "spack_repo.builtin.build_systems.msbuild",
        "NMakePackage": "spack_repo.builtin.build_systems.nmake",
        "OctavePackage": "spack_repo.builtin.build_systems.octave",
        "INTEL_MATH_LIBRARIES": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackageWithSdk": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiStaticLibraryList": "spack_repo.builtin.build_systems.oneapi",
        "PerlPackage": "spack_repo.builtin.build_systems.perl",
        "PythonExtension": "spack_repo.builtin.build_systems.python",
        "PythonPackage": "spack_repo.builtin.build_systems.python",
        "QMakePackage": "spack_repo.builtin.build_systems.qmake",
        "RPackage": "spack_repo.builtin.build_systems.r",
        "RacketPackage": "spack_repo.builtin.build_systems.racket",
        "ROCmPackage": "spack_repo.builtin.build_systems.rocm",
        "RubyPackage": "spack_repo.builtin.build_systems.ruby",
        "SConsPackage": "spack_repo.builtin.build_systems.scons",
        "SIPPackage": "spack_repo.builtin.build_systems.sip",
        "SourceforgePackage": "spack_repo.builtin.build_systems.sourceforge",
        "SourcewarePackage": "spack_repo.builtin.build_systems.sourceware",
        "WafPackage": "spack_repo.builtin.build_systems.waf",
        "XorgPackage": "spack_repo.builtin.build_systems.xorg",
    }

    success = True

    # Use the context-manager form so the scandir iterator (and its directory file
    # descriptor) is closed deterministically rather than at garbage-collection time.
    with os.scandir(packages_dir) as entries:
        for f in entries:
            pkg_path = os.path.join(f.path, "package.py")
            if (
                f.name in ("__init__.py", "__pycache__")
                or not f.is_dir(follow_symlinks=False)
                or os.path.islink(pkg_path)
            ):
                print(f"Skipping {f.path}", file=err)
                continue
            try:
                # ast.parse accepts bytes and honors any PEP 263 coding declaration.
                with open(pkg_path, "rb") as file:
                    tree = ast.parse(file.read())
            except (OSError, SyntaxError) as e:
                print(f"Skipping {pkg_path}: {e}", file=err)
                continue

            #: Symbols that are referenced in the package and may need to be imported.
            referenced_symbols: Set[str] = set()

            #: Set of symbols of interest that are already defined through imports,
            #: assignments, or function definitions.
            defined_symbols: Set[str] = set()

            #: 1-based line number before which the new import lines will be inserted.
            best_line: Optional[int] = None

            seen_import = False

            for node in ast.walk(tree):
                # Get the last import statement from the first block of top-level imports
                if isinstance(node, ast.Module):
                    for child in ast.iter_child_nodes(node):
                        # if we never encounter an import statement, the best line to add is
                        # right before the first node under the module
                        # NOTE(review): if the module starts with a docstring, this picks the
                        # docstring's line, so imports would land before it — confirm package
                        # files never rely on module docstrings.
                        if best_line is None and isinstance(child, ast.stmt):
                            best_line = child.lineno

                        # prefer adding right before `from spack.package import ...`
                        if isinstance(child, ast.ImportFrom) and child.module == "spack.package":
                            seen_import = True
                            best_line = child.lineno  # add it right before spack.package
                            break

                        # otherwise put it right after the last import statement
                        is_import = isinstance(child, (ast.Import, ast.ImportFrom))

                        if is_import:
                            # end_lineno covers multi-line imports (parenthesized lists)
                            best_line = (child.end_lineno or child.lineno) + 1

                        if not seen_import and is_import:
                            seen_import = True
                        elif seen_import and not is_import:
                            # first non-import after the import block: stop scanning
                            break

                # Function definitions or assignments to variables whose name is a symbol of
                # interest are considered as redefinitions, so we skip them.
                elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                    if node.name in symbol_to_module:
                        print(
                            f"{pkg_path}:{node.lineno}: redefinition of `{node.name}` skipped",
                            file=err,
                        )
                        defined_symbols.add(node.name)
                elif isinstance(node, ast.Assign):
                    for target in node.targets:
                        if isinstance(target, ast.Name) and target.id in symbol_to_module:
                            print(
                                f"{pkg_path}:{target.lineno}: redefinition of `{target.id}` skipped",
                                file=err,
                            )
                            defined_symbols.add(target.id)

                # Register symbols that are not imported.
                elif isinstance(node, ast.Name) and node.id in symbol_to_module:
                    referenced_symbols.add(node.id)

                # Register imported symbols to make this operation idempotent
                elif isinstance(node, ast.ImportFrom):
                    for alias in node.names:
                        if alias.name in symbol_to_module:
                            defined_symbols.add(alias.name)
                            if node.module == "spack.package":
                                # cannot be auto-fixed: the symbol must come from a
                                # build_systems module now, so flag it for the user
                                success = False
                                print(
                                    f"{pkg_path}:{node.lineno}: `{alias.name}` is imported from "
                                    "`spack.package`, which no longer provides this symbol",
                                    file=err,
                                )

                        if alias.asname and alias.asname in symbol_to_module:
                            defined_symbols.add(alias.asname)

            # Remove imported symbols from the referenced symbols
            referenced_symbols.difference_update(defined_symbols)

            if not referenced_symbols:
                continue

            if best_line is None:
                print(f"{pkg_path}: failed to update imports", file=err)
                success = False
                continue

            # Add the missing imports right after the last import statement.
            # newline="" preserves the file's original line endings on rewrite.
            with open(pkg_path, "r", encoding="utf-8", newline="") as file:
                lines = file.readlines()

            # Group missing symbols by their module
            missing_imports_by_module: Dict[str, list] = {}
            for symbol in referenced_symbols:
                missing_imports_by_module.setdefault(symbol_to_module[symbol], []).append(symbol)

            new_lines = [
                f"from {module} import {', '.join(sorted(symbols))}\n"
                for module, symbols in sorted(missing_imports_by_module.items())
            ]

            if not seen_import:
                # separate the new import block from the code that follows it
                new_lines.extend(("\n", "\n"))

            if not fix:  # only print the diff
                success = False  # packages need to be fixed, but we didn't do it
                # show three lines of context before and two after the insertion point
                diff_start, diff_end = max(1, best_line - 3), min(best_line + 2, len(lines))
                num_changed = diff_end - diff_start + 1
                num_added = num_changed + len(new_lines)
                rel_pkg_path = os.path.relpath(pkg_path, start=root)
                out.write(f"--- a/{rel_pkg_path}\n+++ b/{rel_pkg_path}\n")
                out.write(f"@@ -{diff_start},{num_changed} +{diff_start},{num_added} @@\n")
                for line in lines[diff_start - 1 : best_line - 1]:
                    out.write(f" {line}")
                for line in new_lines:
                    out.write(f"+{line}")
                for line in lines[best_line - 1 : diff_end]:
                    out.write(f" {line}")
                continue

            lines[best_line - 1 : best_line - 1] = new_lines

            # write to a temp file and atomically replace, so a crash cannot leave a
            # half-written package.py behind
            tmp_file = pkg_path + ".tmp"

            with open(tmp_file, "w", encoding="utf-8", newline="") as file:
                file.writelines(lines)

            os.replace(tmp_file, pkg_path)

    return success
|
||||
23
lib/spack/spack/schema/toolchains.py
Normal file
23
lib/spack/spack/schema/toolchains.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for toolchains.yaml configuration file.

.. literalinclude:: _spack_root/lib/spack/spack/schema/toolchains.py
   :lines: 14-
"""
from typing import Any, Dict

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"toolchains": {"type": "object", "default": {}}}


#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack toolchain configuration file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}
|
||||
@@ -572,7 +572,7 @@ def format_unsolved(unsolved_specs):
|
||||
for input_spec, candidate in unsolved_specs:
|
||||
msg += f"\n\tInput spec: {str(input_spec)}"
|
||||
if candidate:
|
||||
msg += f"\n\tCandidate spec: {str(candidate)}"
|
||||
msg += f"\n\tCandidate spec: {candidate.long_spec}"
|
||||
else:
|
||||
msg += "\n\t(No candidate specs from solver)"
|
||||
return msg
|
||||
@@ -1416,12 +1416,17 @@ class ConstraintOrigin(enum.Enum):
|
||||
result.
|
||||
"""
|
||||
|
||||
CONDITIONAL_SPEC = 0
|
||||
DEPENDS_ON = 1
|
||||
REQUIRE = 2
|
||||
|
||||
@staticmethod
|
||||
def _SUFFIXES() -> Dict["ConstraintOrigin", str]:
|
||||
return {ConstraintOrigin.DEPENDS_ON: "_dep", ConstraintOrigin.REQUIRE: "_req"}
|
||||
return {
|
||||
ConstraintOrigin.CONDITIONAL_SPEC: "_cond",
|
||||
ConstraintOrigin.DEPENDS_ON: "_dep",
|
||||
ConstraintOrigin.REQUIRE: "_req",
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def append_type_suffix(pkg_id: str, kind: "ConstraintOrigin") -> str:
|
||||
@@ -1877,6 +1882,73 @@ def _get_condition_id(
|
||||
|
||||
return cond_id
|
||||
|
||||
def condition_clauses(
|
||||
self,
|
||||
required_spec: spack.spec.Spec,
|
||||
imposed_spec: Optional[spack.spec.Spec] = None,
|
||||
*,
|
||||
required_name: Optional[str] = None,
|
||||
imposed_name: Optional[str] = None,
|
||||
msg: Optional[str] = None,
|
||||
context: Optional[ConditionContext] = None,
|
||||
):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
Arguments:
|
||||
required_spec: the constraints that triggers this condition
|
||||
imposed_spec: the constraints that are imposed when this condition is triggered
|
||||
required_name: name for ``required_spec``
|
||||
(required if required_spec is anonymous, ignored if not)
|
||||
imposed_name: name for ``imposed_spec``
|
||||
(required if imposed_spec is anonymous, ignored if not)
|
||||
msg: description of the condition
|
||||
context: if provided, indicates how to modify the clause-sets for the required/imposed
|
||||
specs based on the type of constraint they are generated for (e.g. `depends_on`)
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
clauses = []
|
||||
required_name = required_spec.name or required_name
|
||||
if not required_name:
|
||||
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
|
||||
|
||||
if not context:
|
||||
context = ConditionContext()
|
||||
context.transform_imposed = remove_facts("node", "virtual_node")
|
||||
|
||||
if imposed_spec:
|
||||
imposed_name = imposed_spec.name or imposed_name
|
||||
if not imposed_name:
|
||||
raise ValueError(f"Must provide a name for imposed constraint: '{imposed_spec}'")
|
||||
|
||||
with named_spec(required_spec, required_name), named_spec(imposed_spec, imposed_name):
|
||||
# Check if we can emit the requirements before updating the condition ID counter.
|
||||
# In this way, if a condition can't be emitted but the exception is handled in the
|
||||
# caller, we won't emit partial facts.
|
||||
|
||||
condition_id = next(self._id_counter)
|
||||
requirement_context = context.requirement_context()
|
||||
trigger_id = self._get_condition_id(
|
||||
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
|
||||
)
|
||||
clauses.append(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
|
||||
clauses.append(fn.condition_reason(condition_id, msg))
|
||||
clauses.append(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
|
||||
)
|
||||
if not imposed_spec:
|
||||
return clauses, condition_id
|
||||
|
||||
impose_context = context.impose_context()
|
||||
effect_id = self._get_condition_id(
|
||||
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
|
||||
)
|
||||
clauses.append(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
|
||||
)
|
||||
|
||||
return clauses, condition_id
|
||||
|
||||
def condition(
|
||||
self,
|
||||
required_spec: spack.spec.Spec,
|
||||
@@ -1902,46 +1974,18 @@ def condition(
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
required_name = required_spec.name or required_name
|
||||
if not required_name:
|
||||
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
|
||||
clauses, condition_id = self.condition_clauses(
|
||||
required_spec=required_spec,
|
||||
imposed_spec=imposed_spec,
|
||||
required_name=required_name,
|
||||
imposed_name=imposed_name,
|
||||
msg=msg,
|
||||
context=context,
|
||||
)
|
||||
for clause in clauses:
|
||||
self.gen.fact(clause)
|
||||
|
||||
if not context:
|
||||
context = ConditionContext()
|
||||
context.transform_imposed = remove_facts("node", "virtual_node")
|
||||
|
||||
if imposed_spec:
|
||||
imposed_name = imposed_spec.name or imposed_name
|
||||
if not imposed_name:
|
||||
raise ValueError(f"Must provide a name for imposed constraint: '{imposed_spec}'")
|
||||
|
||||
with named_spec(required_spec, required_name), named_spec(imposed_spec, imposed_name):
|
||||
# Check if we can emit the requirements before updating the condition ID counter.
|
||||
# In this way, if a condition can't be emitted but the exception is handled in the
|
||||
# caller, we won't emit partial facts.
|
||||
|
||||
condition_id = next(self._id_counter)
|
||||
requirement_context = context.requirement_context()
|
||||
trigger_id = self._get_condition_id(
|
||||
required_spec, cache=self._trigger_cache, body=True, context=requirement_context
|
||||
)
|
||||
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
|
||||
)
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
impose_context = context.impose_context()
|
||||
effect_id = self._get_condition_id(
|
||||
imposed_spec, cache=self._effect_cache, body=False, context=impose_context
|
||||
)
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
|
||||
)
|
||||
|
||||
return condition_id
|
||||
return condition_id
|
||||
|
||||
def impose(self, condition_id, imposed_spec, node=True, body=False):
|
||||
imposed_constraints = self.spec_clauses(imposed_spec, body=body)
|
||||
@@ -2212,6 +2256,10 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
msg=f"{input_spec} is a requirement for package {pkg_name}",
|
||||
context=context,
|
||||
)
|
||||
|
||||
# Conditions don't handle conditional dependencies directly
|
||||
# Those are handled separately here
|
||||
self.generate_conditional_dep_conditions(spec, member_id)
|
||||
except Exception as e:
|
||||
# Do not raise if the rule comes from the 'all' subsection, since usability
|
||||
# would be impaired. If a rule does not apply for a specific package, just
|
||||
@@ -2574,6 +2622,10 @@ def _spec_clauses(
|
||||
if transitive:
|
||||
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
|
||||
for dspec in spec.edges_to_dependencies():
|
||||
# Ignore conditional dependencies, they are handled by caller
|
||||
if dspec.when != spack.spec.Spec():
|
||||
continue
|
||||
|
||||
dep = dspec.spec
|
||||
|
||||
if spec.concrete:
|
||||
@@ -3200,6 +3252,9 @@ def setup(
|
||||
self.gen.h1("Spec Constraints")
|
||||
self.literal_specs(specs)
|
||||
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
self.gen.h1("Variant Values defined in specs")
|
||||
self.define_variant_values()
|
||||
|
||||
@@ -3333,11 +3388,49 @@ def literal_specs(self, specs):
|
||||
cache[imposed_spec_key] = (effect_id, requirements)
|
||||
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
|
||||
|
||||
# Create subcondition with any conditional dependencies
|
||||
# self.spec_clauses does not do anything with conditional
|
||||
# dependencies
|
||||
self.generate_conditional_dep_conditions(spec, condition_id)
|
||||
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.solve_literal(trigger_id))
|
||||
|
||||
self.effect_rules()
|
||||
|
||||
def generate_conditional_dep_conditions(self, spec, condition_id):
|
||||
for dspec in spec.traverse_edges():
|
||||
# Ignore unconditional deps
|
||||
if dspec.when == spack.spec.Spec():
|
||||
continue
|
||||
|
||||
# Cannot use "virtual_node" attr as key for condition
|
||||
# because reused specs do not track virtual nodes.
|
||||
# Instead, track whether the parent uses the virtual
|
||||
def virtual_handler(input_spec, requirements):
|
||||
ret = remove_facts("virtual_node")(input_spec, requirements)
|
||||
for edge in input_spec.traverse_edges(root=False, cover="edges"):
|
||||
if spack.repo.PATH.is_virtual(edge.spec.name):
|
||||
ret.append(fn.attr("uses_virtual", edge.parent.name, edge.spec.name))
|
||||
return ret
|
||||
|
||||
context = ConditionContext()
|
||||
context.source = ConstraintOrigin.append_type_suffix(
|
||||
dspec.parent.name, ConstraintOrigin.CONDITIONAL_SPEC
|
||||
)
|
||||
# Default is to remove node-like attrs, override here
|
||||
context.transform_required = virtual_handler
|
||||
context.transform_imposed = lambda x, y: y
|
||||
|
||||
subcondition_id = self.condition(
|
||||
dspec.when,
|
||||
dspec.spec,
|
||||
required_name=dspec.parent.name,
|
||||
context=context,
|
||||
msg=f"Conditional dependency in literal ^[when={dspec.when}]{dspec.spec}",
|
||||
)
|
||||
self.gen.fact(fn.subcondition(subcondition_id, condition_id))
|
||||
|
||||
def validate_and_define_versions_from_requirements(
|
||||
self, *, allow_deprecated: bool, require_checksum: bool
|
||||
):
|
||||
@@ -3785,6 +3878,7 @@ class SpecBuilder:
|
||||
r"^package_hash$",
|
||||
r"^root$",
|
||||
r"^track_dependencies$",
|
||||
r"^uses_virtual$",
|
||||
r"^variant_default_value_from_cli$",
|
||||
r"^virtual_node$",
|
||||
r"^virtual_on_incoming_edges$",
|
||||
|
||||
@@ -429,8 +429,23 @@ trigger_and_effect(Package, TriggerID, EffectID)
|
||||
|
||||
% condition_holds(ID, node(ID, Package)) implies all imposed_constraints, unless do_not_impose(ID, node(ID, Package))
|
||||
% is derived. This allows imposed constraints to be canceled in special cases.
|
||||
|
||||
% Effects of direct conditions hold if the trigger holds
|
||||
impose(EffectID, node(X, Package))
|
||||
:- trigger_and_effect(Package, TriggerID, EffectID),
|
||||
:- pkg_fact(Package, condition_effect(ConditionID, EffectID)),
|
||||
not subcondition(ConditionID, _),
|
||||
trigger_and_effect(Package, TriggerID, EffectID),
|
||||
trigger_node(TriggerID, _, node(X, Package)),
|
||||
trigger_condition_holds(TriggerID, node(X, Package)),
|
||||
not do_not_impose(EffectID, node(X, Package)).
|
||||
|
||||
% Effects of subconditions hold if the trigger holds and the
|
||||
% primary condition holds
|
||||
impose(EffectID, node(X, Package))
|
||||
:- pkg_fact(Package, condition_effect(SubconditionId, EffectID)),
|
||||
subcondition(SubconditionID, ConditionID),
|
||||
condition_holds(ConditionID, node(X, Package)),
|
||||
trigger_and_effect(Package, TriggerID, EffectID),
|
||||
trigger_node(TriggerID, _, node(X, Package)),
|
||||
trigger_condition_holds(TriggerID, node(X, Package)),
|
||||
not do_not_impose(EffectID, node(X, Package)).
|
||||
@@ -645,6 +660,16 @@ virtual_condition_holds(node(Y, A2), Virtual)
|
||||
attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual),
|
||||
not build(node(X, A1)).
|
||||
|
||||
% Simplified virtual information for conditional requirements in
|
||||
% conditional dependencies
|
||||
% Most specs track virtuals on edges
|
||||
attr("uses_virtual", PackageNode, Virtual) :-
|
||||
attr("virtual_on_edge", PackageNode, _, Virtual).
|
||||
|
||||
% Reused specs don't track a real edge to build-only deps
|
||||
attr("uses_virtual", PackageNode, Virtual) :-
|
||||
attr("virtual_on_build_edge", PackageNode, _, Virtual).
|
||||
|
||||
% we cannot have additional variant values when we are working with concrete specs
|
||||
:- attr("node", node(ID, Package)),
|
||||
attr("hash", node(ID, Package), Hash),
|
||||
@@ -660,6 +685,7 @@ virtual_condition_holds(node(Y, A2), Virtual)
|
||||
internal_error("imposed hash without imposing all flag values").
|
||||
|
||||
#defined condition/2.
|
||||
#defined subcondition/2.
|
||||
#defined condition_requirement/3.
|
||||
#defined condition_requirement/4.
|
||||
#defined condition_requirement/5.
|
||||
|
||||
@@ -720,7 +720,7 @@ class DependencySpec:
|
||||
virtuals: virtual packages provided from child to parent node.
|
||||
"""
|
||||
|
||||
__slots__ = "parent", "spec", "depflag", "virtuals", "direct"
|
||||
__slots__ = "parent", "spec", "depflag", "virtuals", "direct", "when"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -730,12 +730,14 @@ def __init__(
|
||||
depflag: dt.DepFlag,
|
||||
virtuals: Tuple[str, ...],
|
||||
direct: bool = False,
|
||||
when: Optional["Spec"] = None,
|
||||
):
|
||||
self.parent = parent
|
||||
self.spec = spec
|
||||
self.depflag = depflag
|
||||
self.virtuals = tuple(sorted(set(virtuals)))
|
||||
self.direct = direct
|
||||
self.when = when or Spec()
|
||||
|
||||
def update_deptypes(self, depflag: dt.DepFlag) -> bool:
|
||||
"""Update the current dependency types"""
|
||||
@@ -766,6 +768,7 @@ def copy(self) -> "DependencySpec":
|
||||
depflag=self.depflag,
|
||||
virtuals=self.virtuals,
|
||||
direct=self.direct,
|
||||
when=self.when,
|
||||
)
|
||||
|
||||
def _cmp_iter(self):
|
||||
@@ -777,10 +780,13 @@ def _cmp_iter(self):
|
||||
def __str__(self) -> str:
|
||||
parent = self.parent.name if self.parent else None
|
||||
child = self.spec.name if self.spec else None
|
||||
return f"{parent} {self.depflag}[virtuals={','.join(self.virtuals)}] --> {child}"
|
||||
virtuals_string = f"virtuals={','.join(self.virtuals)}" if self.virtuals else ""
|
||||
when_string = f"when='{self.when}'" if self.when != Spec() else ""
|
||||
edge_attrs = filter(lambda x: bool(x), (virtuals_string, when_string))
|
||||
return f"{parent} {self.depflag}[{' '.join(edge_attrs)}] --> {child}"
|
||||
|
||||
def flip(self) -> "DependencySpec":
|
||||
"""Flip the dependency, and drop virtual information"""
|
||||
"""Flip the dependency, and drop virtual and conditional information"""
|
||||
return DependencySpec(
|
||||
parent=self.spec, spec=self.parent, depflag=self.depflag, virtuals=()
|
||||
)
|
||||
@@ -1021,6 +1027,7 @@ def select(
|
||||
child: Optional[str] = None,
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
virtuals: Optional[Union[str, Sequence[str]]] = None,
|
||||
when: Optional["Spec"] = None,
|
||||
) -> List[DependencySpec]:
|
||||
"""Selects a list of edges and returns them.
|
||||
|
||||
@@ -1040,6 +1047,7 @@ def select(
|
||||
child: name of the child package
|
||||
depflag: allowed dependency types in flag form
|
||||
virtuals: list of virtuals or specific virtual on the edge
|
||||
when: condition on conditional dependency, or Spec() for unconditional dependency only
|
||||
"""
|
||||
if not depflag:
|
||||
return []
|
||||
@@ -1065,6 +1073,9 @@ def select(
|
||||
else:
|
||||
selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
|
||||
|
||||
if when is not None:
|
||||
selected = (dep for dep in selected if dep.when == when)
|
||||
|
||||
return list(selected)
|
||||
|
||||
def clear(self):
|
||||
@@ -1612,6 +1623,7 @@ def edges_to_dependencies(
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
*,
|
||||
virtuals: Optional[Union[str, Sequence[str]]] = None,
|
||||
when: Optional["Spec"] = None,
|
||||
) -> List[DependencySpec]:
|
||||
"""Returns a list of edges connecting this node in the DAG to children.
|
||||
|
||||
@@ -1619,9 +1631,13 @@ def edges_to_dependencies(
|
||||
name: filter dependencies by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
when: condition on conditional dependencies (or Spec() for unconditional)
|
||||
"""
|
||||
return [
|
||||
d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
|
||||
d
|
||||
for d in self._dependencies.select(
|
||||
child=name, depflag=depflag, virtuals=virtuals, when=when
|
||||
)
|
||||
]
|
||||
|
||||
@property
|
||||
@@ -1633,20 +1649,26 @@ def edge_attributes(self) -> str:
|
||||
|
||||
union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=())
|
||||
all_direct_edges = all(x.direct for x in edges)
|
||||
dep_conditions = set()
|
||||
|
||||
for edge in edges:
|
||||
union.update_deptypes(edge.depflag)
|
||||
union.update_virtuals(edge.virtuals)
|
||||
dep_conditions.add(edge.when)
|
||||
|
||||
deptypes_str = ""
|
||||
if not all_direct_edges and union.depflag:
|
||||
deptypes_str = f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}"
|
||||
|
||||
virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else ""
|
||||
if not deptypes_str and not virtuals_str:
|
||||
return ""
|
||||
result = f"{deptypes_str} {virtuals_str}".strip()
|
||||
return f"[{result}]"
|
||||
|
||||
conditions = [str(c) for c in dep_conditions if c != Spec()]
|
||||
when_str = f"when='{','.join(conditions)}'" if conditions else ""
|
||||
|
||||
result = " ".join(filter(lambda x: bool(x), (when_str, deptypes_str, virtuals_str)))
|
||||
if result:
|
||||
result = f"[{result}]"
|
||||
return result
|
||||
|
||||
def dependencies(
|
||||
self,
|
||||
@@ -1654,6 +1676,7 @@ def dependencies(
|
||||
deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
|
||||
*,
|
||||
virtuals: Optional[Union[str, Sequence[str]]] = None,
|
||||
when: Optional["Spec"] = None,
|
||||
) -> List["Spec"]:
|
||||
"""Returns a list of direct dependencies (nodes in the DAG)
|
||||
|
||||
@@ -1661,11 +1684,15 @@ def dependencies(
|
||||
name: filter dependencies by package name
|
||||
deptype: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
when: condition on conditional dependency or Spec() for unconditional
|
||||
"""
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [
|
||||
d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
|
||||
d.spec
|
||||
for d in self.edges_to_dependencies(
|
||||
name, depflag=deptype, virtuals=virtuals, when=when
|
||||
)
|
||||
]
|
||||
|
||||
def dependents(
|
||||
@@ -1752,7 +1779,13 @@ def _set_architecture(self, **kwargs):
|
||||
setattr(self.architecture, new_attr, new_value)
|
||||
|
||||
def _add_dependency(
|
||||
self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...], direct: bool = False
|
||||
self,
|
||||
spec: "Spec",
|
||||
*,
|
||||
depflag: dt.DepFlag,
|
||||
virtuals: Tuple[str, ...],
|
||||
direct: bool = False,
|
||||
when: Optional["Spec"] = None,
|
||||
):
|
||||
"""Called by the parser to add another spec as a dependency.
|
||||
|
||||
@@ -1760,23 +1793,33 @@ def _add_dependency(
|
||||
depflag: dependency type for this edge
|
||||
virtuals: virtuals on this edge
|
||||
direct: if True denotes a direct dependency (associated with the % sigil)
|
||||
when: if non-None, condition under which dependency holds
|
||||
"""
|
||||
if when is None:
|
||||
when = Spec()
|
||||
|
||||
if spec.name not in self._dependencies or not spec.name:
|
||||
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals, direct=direct)
|
||||
self.add_dependency_edge(
|
||||
spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
|
||||
)
|
||||
return
|
||||
|
||||
# Keep the intersection of constraints when a dependency is added multiple times with
|
||||
# the same deptype. Add a new dependency if it is added with a compatible deptype
|
||||
# (for example, a build-only dependency is compatible with a link-only dependenyc).
|
||||
# (for example, a build-only dependency is compatible with a link-only dependency).
|
||||
# The only restrictions, currently, are that we cannot add edges with overlapping
|
||||
# dependency types and we cannot add multiple edges that have link/run dependency types.
|
||||
# See ``spack.deptypes.compatible``.
|
||||
orig = self._dependencies[spec.name]
|
||||
try:
|
||||
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
|
||||
dspec = next(
|
||||
dspec for dspec in orig if depflag == dspec.depflag and when == dspec.when
|
||||
)
|
||||
except StopIteration:
|
||||
# Error if we have overlapping or incompatible deptypes
|
||||
if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig):
|
||||
if any(not dt.compatible(dspec.depflag, depflag) for dspec in orig) and all(
|
||||
dspec.when == when for dspec in orig
|
||||
):
|
||||
edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
|
||||
required_dep_str = f"^[{edge_attrs}] {str(spec)}"
|
||||
|
||||
@@ -1785,7 +1828,9 @@ def _add_dependency(
|
||||
f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
|
||||
)
|
||||
|
||||
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals, direct=direct)
|
||||
self.add_dependency_edge(
|
||||
spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
@@ -1803,6 +1848,7 @@ def add_dependency_edge(
|
||||
depflag: dt.DepFlag,
|
||||
virtuals: Tuple[str, ...],
|
||||
direct: bool = False,
|
||||
when: Optional["Spec"] = None,
|
||||
):
|
||||
"""Add a dependency edge to this spec.
|
||||
|
||||
@@ -1811,13 +1857,20 @@ def add_dependency_edge(
|
||||
deptypes: dependency types for this edge
|
||||
virtuals: virtuals provided by this edge
|
||||
direct: if True denotes a direct dependency
|
||||
when: if non-None, condition under which dependency holds
|
||||
"""
|
||||
if when is None:
|
||||
when = Spec()
|
||||
|
||||
# Check if we need to update edges that are already present
|
||||
selected = self._dependencies.select(child=dependency_spec.name)
|
||||
for edge in selected:
|
||||
has_errors, details = False, []
|
||||
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
|
||||
|
||||
if edge.when != when:
|
||||
continue
|
||||
|
||||
# If the dependency is to an existing spec, we can update dependency
|
||||
# types. If it is to a new object, check deptype compatibility.
|
||||
if id(edge.spec) != id(dependency_spec) and not dt.compatible(edge.depflag, depflag):
|
||||
@@ -1841,7 +1894,7 @@ def add_dependency_edge(
|
||||
raise spack.error.SpecError(msg, "\n".join(details))
|
||||
|
||||
for edge in selected:
|
||||
if id(dependency_spec) == id(edge.spec):
|
||||
if id(dependency_spec) == id(edge.spec) and edge.when == when:
|
||||
# If we are here, it means the edge object was previously added to
|
||||
# both the parent and the child. When we update this object they'll
|
||||
# both see the deptype modification.
|
||||
@@ -1850,7 +1903,7 @@ def add_dependency_edge(
|
||||
return
|
||||
|
||||
edge = DependencySpec(
|
||||
self, dependency_spec, depflag=depflag, virtuals=virtuals, direct=direct
|
||||
self, dependency_spec, depflag=depflag, virtuals=virtuals, direct=direct, when=when
|
||||
)
|
||||
self._dependencies.add(edge)
|
||||
dependency_spec._dependents.add(edge)
|
||||
@@ -2085,14 +2138,14 @@ def long_spec(self):
|
||||
new_name = spack.aliases.BUILTIN_TO_LEGACY_COMPILER.get(current_name, current_name)
|
||||
# note: depflag not allowed, currently, on "direct" edges
|
||||
edge_attributes = ""
|
||||
if item.virtuals:
|
||||
if item.virtuals or item.when != Spec():
|
||||
edge_attributes = item.spec.format("{edge_attributes}") + " "
|
||||
|
||||
parts.append(f"%{edge_attributes}{item.spec.format()}".replace(current_name, new_name))
|
||||
for item in sorted(transitive, key=lambda x: x.spec.name):
|
||||
# Recurse to attach build deps in order
|
||||
edge_attributes = ""
|
||||
if item.virtuals or item.depflag:
|
||||
if item.virtuals or item.depflag or item.when != Spec():
|
||||
edge_attributes = item.spec.format("{edge_attributes}") + " "
|
||||
parts.append(f"^{edge_attributes}{str(item.spec)}")
|
||||
return " ".join(parts).strip()
|
||||
@@ -3126,41 +3179,22 @@ def _constrain_dependencies(self, other: "Spec") -> bool:
|
||||
if any(not d.name for d in other.traverse(root=False)):
|
||||
raise UnconstrainableDependencySpecError(other)
|
||||
|
||||
# Handle common first-order constraints directly
|
||||
# Note: This doesn't handle constraining transitive dependencies with the same name
|
||||
# as direct dependencies
|
||||
changed = False
|
||||
common_dependencies = {x.name for x in self.dependencies()}
|
||||
common_dependencies &= {x.name for x in other.dependencies()}
|
||||
for name in common_dependencies:
|
||||
changed |= self[name].constrain(other[name], deps=True)
|
||||
if name in self._dependencies:
|
||||
# WARNING: This function is an implementation detail of the
|
||||
# WARNING: original concretizer. Since with that greedy
|
||||
# WARNING: algorithm we don't allow multiple nodes from
|
||||
# WARNING: the same package in a DAG, here we hard-code
|
||||
# WARNING: using index 0 i.e. we assume that we have only
|
||||
# WARNING: one edge from package "name"
|
||||
edges_from_name = self._dependencies[name]
|
||||
changed |= edges_from_name[0].update_deptypes(other._dependencies[name][0].depflag)
|
||||
changed |= edges_from_name[0].update_virtuals(
|
||||
other._dependencies[name][0].virtuals
|
||||
reference_spec = self.copy(deps=True)
|
||||
for edge in other.edges_to_dependencies():
|
||||
existing = self.edges_to_dependencies(edge.spec.name, when=edge.when)
|
||||
if existing:
|
||||
existing[0].spec.constrain(edge.spec)
|
||||
existing[0].update_deptypes(edge.depflag)
|
||||
existing[0].update_virtuals(edge.virtuals)
|
||||
else:
|
||||
self.add_dependency_edge(
|
||||
edge.spec,
|
||||
depflag=edge.depflag,
|
||||
virtuals=edge.virtuals,
|
||||
direct=edge.direct,
|
||||
when=edge.when,
|
||||
)
|
||||
|
||||
# Update with additional constraints from other spec
|
||||
# operate on direct dependencies only, because a concrete dep
|
||||
# represented by hash may have structure that needs to be preserved
|
||||
for name in other.direct_dep_difference(self):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
self._add_dependency(
|
||||
dep_spec_copy.spec.copy(),
|
||||
depflag=dep_spec_copy.depflag,
|
||||
virtuals=dep_spec_copy.virtuals,
|
||||
direct=dep_spec_copy.direct,
|
||||
)
|
||||
changed = True
|
||||
|
||||
return changed
|
||||
return self != reference_spec
|
||||
|
||||
def common_dependencies(self, other):
|
||||
"""Return names of dependencies that self and other have in common."""
|
||||
@@ -3397,10 +3431,6 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
if not other._dependencies:
|
||||
return True
|
||||
|
||||
# If we have no dependencies, we can't satisfy any constraints.
|
||||
if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
|
||||
return False
|
||||
|
||||
# If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
|
||||
# all the edges that have an abstract parent, and verify that they match some edge in the
|
||||
# lhs.
|
||||
@@ -3411,6 +3441,11 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
|
||||
mock_nodes_from_old_specfiles = set()
|
||||
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
|
||||
# Skip checking any conditional edge that is not satisfied
|
||||
if rhs_edge.when != Spec() and not self.satisfies(rhs_edge.when):
|
||||
# TODO: this misses the case that the rhs statically satisfies its own condition
|
||||
continue
|
||||
|
||||
# If we are checking for ^mpi we need to verify if there is any edge
|
||||
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
|
||||
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
|
||||
@@ -3468,6 +3503,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
for lhs_edge in self.traverse_edges(
|
||||
root=False, cover="edges", deptype=("link", "run")
|
||||
):
|
||||
# TODO: do we need to avoid conditional edges here
|
||||
lhs_edges[lhs_edge.spec.name].add(lhs_edge)
|
||||
for virtual_name in lhs_edge.virtuals:
|
||||
lhs_edges[virtual_name].add(lhs_edge)
|
||||
@@ -3484,6 +3520,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return False
|
||||
|
||||
for virtual in rhs_edge.virtuals:
|
||||
# TODO: consider how this could apply to conditional edges
|
||||
has_virtual = any(
|
||||
virtual in edge.virtuals for edge in lhs_edges[current_dependency_name]
|
||||
)
|
||||
@@ -3491,11 +3528,22 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return False
|
||||
|
||||
# Edges have been checked above already, hence deps=False
|
||||
lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
|
||||
return all(
|
||||
any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
|
||||
for rhs in other.traverse(root=False)
|
||||
)
|
||||
lhs_nodes = list(self.traverse(root=False)) + sorted(mock_nodes_from_old_specfiles)
|
||||
for rhs in other.traverse(root=False):
|
||||
# Possible lhs nodes to match this rhs node
|
||||
lhss = [lhs for lhs in lhs_nodes if lhs.satisfies(rhs, deps=False)]
|
||||
|
||||
# Check whether the node needs matching (not a conditional that isn't satisfied)
|
||||
if not any(self.satisfies(e.when) for e in rhs.edges_from_dependents()):
|
||||
# TODO: This technically misses the case that the edge is analogous
|
||||
# to an edge lower in the DAG, and could give a false negative in that case
|
||||
continue
|
||||
|
||||
# If there is no matching lhs for this rhs node
|
||||
if not lhss:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@property # type: ignore[misc] # decorated prop not supported in mypy
|
||||
def patches(self):
|
||||
@@ -3629,6 +3677,7 @@ def spid(spec):
|
||||
depflag=edge.depflag,
|
||||
virtuals=edge.virtuals,
|
||||
direct=edge.direct,
|
||||
when=edge.when,
|
||||
)
|
||||
|
||||
def copy(self, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, **kwargs):
|
||||
|
||||
@@ -56,16 +56,18 @@
|
||||
specs to avoid ambiguity. Both are provided because ~ can cause shell
|
||||
expansion when it is the first character in an id typed on the command line.
|
||||
"""
|
||||
import itertools
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import warnings
|
||||
from typing import Iterator, List, Optional, Tuple, Union
|
||||
from typing import Iterable, Iterator, List, Optional, Tuple, Union
|
||||
|
||||
from llnl.util.tty import color
|
||||
|
||||
import spack.config
|
||||
import spack.deptypes
|
||||
import spack.error
|
||||
import spack.paths
|
||||
@@ -162,6 +164,15 @@ def tokenize(text: str) -> Iterator[Token]:
|
||||
yield token
|
||||
|
||||
|
||||
def parseable_tokens(text: str) -> Iterator[Token]:
|
||||
"""Return non-whitespace tokens from the text passed as input
|
||||
|
||||
Raises:
|
||||
SpecTokenizationError: when unexpected characters are found in the text
|
||||
"""
|
||||
return filter(lambda x: x.kind != SpecTokens.WS, tokenize(text))
|
||||
|
||||
|
||||
class TokenContext:
|
||||
"""Token context passed around by parsers"""
|
||||
|
||||
@@ -189,6 +200,16 @@ def accept(self, kind: SpecTokens):
|
||||
def expect(self, *kinds: SpecTokens):
|
||||
return self.next_token and self.next_token.kind in kinds
|
||||
|
||||
def push(self, token_stream: Iterator[Token]):
|
||||
# New tokens need to go before next_token, which comes before the rest of the stream
|
||||
next_token_iterator: Iterable[Token] = (
|
||||
iter((self.next_token,)) if self.next_token else iter(())
|
||||
)
|
||||
self.token_stream = itertools.chain(token_stream, next_token_iterator, self.token_stream)
|
||||
self.current_token = None
|
||||
self.next_token = None
|
||||
self.advance()
|
||||
|
||||
|
||||
class SpecTokenizationError(spack.error.SpecSyntaxError):
|
||||
"""Syntax error in a spec string"""
|
||||
@@ -238,11 +259,13 @@ class SpecParser:
|
||||
|
||||
def __init__(self, literal_str: str):
|
||||
self.literal_str = literal_str
|
||||
self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))
|
||||
self.ctx = TokenContext(parseable_tokens(literal_str))
|
||||
|
||||
def tokens(self) -> List[Token]:
|
||||
"""Return the entire list of token from the initial text. White spaces are
|
||||
filtered out.
|
||||
|
||||
Note: This list will not show tokens pushed when parsing an alias
|
||||
"""
|
||||
return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))
|
||||
|
||||
@@ -268,6 +291,9 @@ def add_dependency(dep, **edge_properties):
|
||||
except spack.error.SpecError as e:
|
||||
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e
|
||||
|
||||
# Get toolchain information outside of loop
|
||||
toolchains = spack.config.CONFIG.get("toolchains", {})
|
||||
|
||||
initial_spec = initial_spec or spack.spec.Spec()
|
||||
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
|
||||
current_spec = root_spec
|
||||
@@ -297,6 +323,15 @@ def add_dependency(dep, **edge_properties):
|
||||
add_dependency(dependency, **edge_properties)
|
||||
|
||||
elif self.ctx.accept(SpecTokens.DEPENDENCY):
|
||||
# String replacement for toolchains
|
||||
# Look ahead to match upcoming value to list of toolchains
|
||||
if self.ctx.next_token.value in toolchains:
|
||||
assert self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME)
|
||||
# accepting the token advances it to be the current token
|
||||
# Push associated tokens back to the TokenContext
|
||||
self.ctx.push(parseable_tokens(toolchains[self.ctx.current_token.value]))
|
||||
continue
|
||||
|
||||
is_direct = self.ctx.current_token.value[0] == "%"
|
||||
dependency, warnings = self._parse_node(root_spec)
|
||||
edge_properties = {}
|
||||
@@ -511,10 +546,10 @@ def parse(self):
|
||||
name = name[:-1]
|
||||
value = value.strip("'\" ").split(",")
|
||||
attributes[name] = value
|
||||
if name not in ("deptypes", "virtuals"):
|
||||
if name not in ("deptypes", "virtuals", "when"):
|
||||
msg = (
|
||||
"the only edge attributes that are currently accepted "
|
||||
'are "deptypes" and "virtuals"'
|
||||
'are "deptypes", "virtuals", and "when"'
|
||||
)
|
||||
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
||||
# TODO: Add code to accept bool variants here as soon as use variants are implemented
|
||||
@@ -528,6 +563,11 @@ def parse(self):
|
||||
if "deptypes" in attributes:
|
||||
deptype_string = attributes.pop("deptypes")
|
||||
attributes["depflag"] = spack.deptypes.canonicalize(deptype_string)
|
||||
|
||||
# Turn "when" into a spec
|
||||
if "when" in attributes:
|
||||
attributes["when"] = spack.spec.Spec(attributes["when"][0])
|
||||
|
||||
return attributes
|
||||
|
||||
|
||||
@@ -573,8 +613,9 @@ class SpecParsingError(spack.error.SpecSyntaxError):
|
||||
|
||||
def __init__(self, message, token, text):
|
||||
message += f"\n{text}"
|
||||
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
|
||||
message += color.colorize(f"@*r{{{underline}}}")
|
||||
if token:
|
||||
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
|
||||
message += color.colorize(f"@*r{{{underline}}}")
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
|
||||
@@ -28,15 +28,9 @@
|
||||
(["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone test method in build-time callbacks
|
||||
(["fail-test-audit"], ["PKG-PROPERTIES"]),
|
||||
# This package implements and uses several deprecated stand-alone test methods
|
||||
pytest.param(
|
||||
["fail-test-audit-deprecated"],
|
||||
["PKG-DEPRECATED-ATTRIBUTES"],
|
||||
marks=pytest.mark.xfail(
|
||||
reason="inspect.getsource() reads the source file, "
|
||||
"which misses an injected import line"
|
||||
),
|
||||
),
|
||||
# This package implements and uses several deprecated stand-alone
|
||||
# test methods
|
||||
(["fail-test-audit-deprecated"], ["PKG-DEPRECATED-ATTRIBUTES"]),
|
||||
# This package has stand-alone test methods without non-trivial docstrings
|
||||
(["fail-test-audit-docstring"], ["PKG-PROPERTIES"]),
|
||||
# This package has a stand-alone test method without an implementation
|
||||
|
||||
@@ -241,13 +241,13 @@ def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Test installing from build caches
|
||||
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# This gives warning that spec is already installed
|
||||
buildcache_cmd("install", "-uo", cspec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-fu", cspec.name)
|
||||
|
||||
buildcache_cmd("keys", "-f")
|
||||
buildcache_cmd("list")
|
||||
@@ -273,10 +273,10 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
|
||||
|
||||
# Install some packages with dependent packages
|
||||
# test install in non-default install path scheme
|
||||
buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)
|
||||
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
|
||||
|
||||
# Test force install in non-default install path scheme
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*required_executables)
|
||||
@@ -298,19 +298,19 @@ def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
|
||||
cspec = spack.concretize.concretize_one("corge")
|
||||
|
||||
# Install buildcache created with relativized rpaths
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# This gives warning that spec is already installed
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
|
||||
# Install build cache
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
# Test overwrite install
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
@pytest.mark.requires_executables(*required_executables)
|
||||
@@ -327,7 +327,7 @@ def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
|
||||
cspec = spack.concretize.concretize_one("corge")
|
||||
|
||||
# Test install in non-default install path scheme and relative path
|
||||
buildcache_cmd("install", "-ufo", cspec.name)
|
||||
buildcache_cmd("install", "-uf", cspec.name)
|
||||
|
||||
|
||||
def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
|
||||
|
||||
@@ -12,7 +12,8 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack
|
||||
import spack.build_systems.autotools
|
||||
import spack.build_systems.cmake
|
||||
import spack.builder
|
||||
import spack.concretize
|
||||
import spack.environment
|
||||
@@ -27,8 +28,6 @@
|
||||
|
||||
DATA_PATH = os.path.join(spack.paths.test_path, "data")
|
||||
|
||||
pytestmark = pytest.mark.skip(reason="build_systems module is moved out of spack")
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def concretize_and_setup(default_mock_concretization, monkeypatch):
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
|
||||
@pytest.fixture()
|
||||
def builder_test_repository(config):
|
||||
builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test")
|
||||
builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
|
||||
with spack.repo.use_repositories(builder_test_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
|
||||
@@ -549,10 +549,11 @@ def test_url_buildcache_entry_v2_exists(
|
||||
):
|
||||
"""Test existence check for v2 buildcache entries"""
|
||||
test_mirror_path = v2_buildcache_layout("unsigned")
|
||||
mirror_url = pathlib.Path(test_mirror_path).as_uri()
|
||||
mirror_url = f"file://{test_mirror_path}"
|
||||
mirror("add", "v2mirror", mirror_url)
|
||||
|
||||
output = buildcache("list", "-a", "-l")
|
||||
with capsys.disabled():
|
||||
output = buildcache("list", "-a", "-l")
|
||||
|
||||
assert "Fetching an index from a v2 binary mirror layout" in output
|
||||
assert "is deprecated" in output
|
||||
|
||||
@@ -15,8 +15,6 @@
|
||||
|
||||
compiler = spack.main.SpackCommand("compiler")
|
||||
|
||||
pytestmark = [pytest.mark.usefixtures("mock_packages")]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def compilers_dir(mock_executable):
|
||||
@@ -82,7 +80,7 @@ def test_compiler_find_without_paths(no_packages_yaml, working_env, mock_executa
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
|
||||
def test_compiler_remove(mutable_config):
|
||||
def test_compiler_remove(mutable_config, mock_packages):
|
||||
"""Tests that we can remove a compiler from configuration."""
|
||||
assert any(
|
||||
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()
|
||||
@@ -95,7 +93,7 @@ def test_compiler_remove(mutable_config):
|
||||
|
||||
|
||||
@pytest.mark.regression("37996")
|
||||
def test_removing_compilers_from_multiple_scopes(mutable_config):
|
||||
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
|
||||
# Duplicate "site" scope into "user" scope
|
||||
site_config = spack.config.get("packages", scope="site")
|
||||
spack.config.set("packages", site_config, scope="user")
|
||||
@@ -191,12 +189,12 @@ def test_compiler_find_path_order(no_packages_yaml, working_env, compilers_dir):
|
||||
}
|
||||
|
||||
|
||||
def test_compiler_list_empty(no_packages_yaml, compilers_dir, monkeypatch):
|
||||
def test_compiler_list_empty(no_packages_yaml, working_env, compilers_dir):
|
||||
"""Spack should not automatically search for compilers when listing them and none are
|
||||
available. And when stdout is not a tty like in tests, there should be no output and
|
||||
no error exit code.
|
||||
"""
|
||||
monkeypatch.setenv("PATH", str(compilers_dir), prepend=":")
|
||||
os.environ["PATH"] = str(compilers_dir)
|
||||
out = compiler("list")
|
||||
assert not out
|
||||
assert compiler.returncode == 0
|
||||
|
||||
@@ -2,39 +2,134 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.cmd.diff
|
||||
import spack.concretize
|
||||
import spack.main
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.test.conftest import create_test_repo
|
||||
|
||||
install_cmd = spack.main.SpackCommand("install")
|
||||
diff_cmd = spack.main.SpackCommand("diff")
|
||||
find_cmd = spack.main.SpackCommand("find")
|
||||
|
||||
|
||||
_p1 = (
|
||||
"p1",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class P1(Package):
|
||||
version("1.0")
|
||||
|
||||
variant("p1var", default=True)
|
||||
variant("usev1", default=True)
|
||||
|
||||
depends_on("p2")
|
||||
depends_on("v1", when="+usev1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_p2 = (
|
||||
"p2",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class P2(Package):
|
||||
version("1.0")
|
||||
|
||||
variant("p2var", default=True)
|
||||
|
||||
depends_on("p3")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_p3 = (
|
||||
"p3",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class P3(Package):
|
||||
version("1.0")
|
||||
|
||||
variant("p3var", default=True)
|
||||
""",
|
||||
)
|
||||
|
||||
_i1 = (
|
||||
"i1",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class I1(Package):
|
||||
version("1.0")
|
||||
|
||||
provides("v1")
|
||||
|
||||
variant("i1var", default=True)
|
||||
|
||||
depends_on("p3")
|
||||
depends_on("p4")
|
||||
""",
|
||||
)
|
||||
|
||||
_i2 = (
|
||||
"i2",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class I2(Package):
|
||||
version("1.0")
|
||||
|
||||
provides("v1")
|
||||
|
||||
variant("i2var", default=True)
|
||||
|
||||
depends_on("p3")
|
||||
depends_on("p4")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_p4 = (
|
||||
"p4",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class P4(Package):
|
||||
version("1.0")
|
||||
|
||||
variant("p4var", default=True)
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
# Note that the hash of p1 will differ depending on the variant chosen
|
||||
# we probably always want to omit that from diffs
|
||||
# p1____
|
||||
# | \
|
||||
# p2 v1
|
||||
# | ____/ |
|
||||
# p3 p4
|
||||
@pytest.fixture
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
"""
|
||||
p1____
|
||||
| \
|
||||
p2 v1
|
||||
| ____/ |
|
||||
p3 p4
|
||||
|
||||
# i1 and i2 provide v1 (and both have the same dependencies)
|
||||
i1 and i2 provide v1 (and both have the same dependencies)
|
||||
|
||||
# All packages have an associated variant
|
||||
All packages have an associated variant
|
||||
"""
|
||||
yield create_test_repo(tmpdir, [_p1, _p2, _p3, _i1, _i2, _p4])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(config):
|
||||
builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "diff")
|
||||
with spack.repo.use_repositories(builder_test_path) as mock_repo:
|
||||
yield mock_repo
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
def test_diff_ignore(test_repo):
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
|
||||
import spack.repo
|
||||
import spack.util.editor
|
||||
from spack.build_systems import autotools, cmake
|
||||
from spack.main import SpackCommand
|
||||
|
||||
edit = SpackCommand("edit")
|
||||
@@ -28,15 +29,13 @@ def editor(*args: str, **kwargs):
|
||||
assert called
|
||||
|
||||
|
||||
def test_edit_files(monkeypatch, mock_packages):
|
||||
def test_edit_files(monkeypatch):
|
||||
"""Test spack edit --build-system autotools cmake"""
|
||||
called = False
|
||||
|
||||
def editor(*args: str, **kwargs):
|
||||
nonlocal called
|
||||
called = True
|
||||
from spack_repo.builtin_mock.build_systems import autotools, cmake # type: ignore
|
||||
|
||||
assert os.path.samefile(args[0], autotools.__file__)
|
||||
assert os.path.samefile(args[1], cmake.__file__)
|
||||
|
||||
|
||||
@@ -886,12 +886,12 @@ def test_env_activate_broken_view(
|
||||
with spack.repo.use_repositories(mock_custom_repository):
|
||||
wrong_repo = env("activate", "--sh", "test")
|
||||
assert "Warning: could not load runtime environment" in wrong_repo
|
||||
assert "Unknown namespace: builtin_mock" in wrong_repo
|
||||
assert "Unknown namespace: builtin.mock" in wrong_repo
|
||||
|
||||
# test replacing repo fixes it
|
||||
normal_repo = env("activate", "--sh", "test")
|
||||
assert "Warning: could not load runtime environment" not in normal_repo
|
||||
assert "Unknown namespace: builtin_mock" not in normal_repo
|
||||
assert "Unknown namespace: builtin.mock" not in normal_repo
|
||||
|
||||
|
||||
def test_to_lockfile_dict():
|
||||
@@ -916,7 +916,7 @@ def test_env_repo():
|
||||
|
||||
pkg_cls = e.repo.get_pkg_class("mpileaks")
|
||||
assert pkg_cls.name == "mpileaks"
|
||||
assert pkg_cls.namespace == "builtin_mock"
|
||||
assert pkg_cls.namespace == "builtin.mock"
|
||||
|
||||
|
||||
def test_user_removed_spec(environment_from_manifest):
|
||||
@@ -4286,7 +4286,7 @@ def test_env_include_packages_url(
|
||||
"""Test inclusion of a (GitHub) URL."""
|
||||
develop_url = "https://github.com/fake/fake/blob/develop/"
|
||||
default_packages = develop_url + "etc/fake/defaults/packages.yaml"
|
||||
sha256 = "6a1b26c857ca7e5bcd7342092e2f218da43d64b78bd72771f603027ea3c8b4af"
|
||||
sha256 = "8b69d9c6e983dfb8bac2ddc3910a86265cffdd9c85f905c716d426ec5b0d9847"
|
||||
spack_yaml = tmpdir.join("spack.yaml")
|
||||
with spack_yaml.open("w") as f:
|
||||
f.write(
|
||||
|
||||
@@ -18,8 +18,6 @@
|
||||
from spack.main import SpackCommand
|
||||
from spack.spec import Spec
|
||||
|
||||
pytestmark = [pytest.mark.usefixtures("mock_packages")]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def executables_found(monkeypatch):
|
||||
@@ -38,6 +36,40 @@ def define_plat_exe(exe):
|
||||
return exe
|
||||
|
||||
|
||||
def test_find_external_single_package(mock_executable):
|
||||
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
search_dir = cmake_path.parent.parent
|
||||
|
||||
specs_by_package = spack.detection.by_path(["cmake"], path_hints=[str(search_dir)])
|
||||
|
||||
assert len(specs_by_package) == 1 and "cmake" in specs_by_package
|
||||
detected_spec = specs_by_package["cmake"]
|
||||
assert len(detected_spec) == 1 and detected_spec[0] == Spec("cmake@1.foo")
|
||||
|
||||
|
||||
def test_find_external_two_instances_same_package(mock_executable):
|
||||
# Each of these cmake instances is created in a different prefix
|
||||
# In Windows, quoted strings are echo'd with quotes includes
|
||||
# we need to avoid that for proper regex.
|
||||
cmake1 = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
|
||||
cmake2 = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))
|
||||
search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]
|
||||
|
||||
finder = spack.detection.path.ExecutablesFinder()
|
||||
detected_specs = finder.find(
|
||||
pkg_name="cmake", initial_guess=search_paths, repository=spack.repo.PATH
|
||||
)
|
||||
|
||||
assert len(detected_specs) == 2
|
||||
spec_to_path = {s: s.external_path for s in detected_specs}
|
||||
assert spec_to_path[Spec("cmake@1.foo")] == (
|
||||
spack.detection.executable_prefix(str(cmake1.parent))
|
||||
), spec_to_path
|
||||
assert spec_to_path[Spec("cmake@3.17.2")] == (
|
||||
spack.detection.executable_prefix(str(cmake2.parent))
|
||||
)
|
||||
|
||||
|
||||
def test_find_external_update_config(mutable_config):
|
||||
entries = [
|
||||
Spec.from_detection("cmake@1.foo", external_path="/x/y1"),
|
||||
@@ -69,24 +101,13 @@ def test_get_executables(working_env, mock_executable):
|
||||
# TODO: this test should be made to work, but in the meantime it is
|
||||
# causing intermittent (spurious) CI failures on all PRs
|
||||
@pytest.mark.not_on_windows("Test fails intermittently on Windows")
|
||||
def test_find_external_cmd_not_buildable(
|
||||
mutable_config, working_env, mock_executable, monkeypatch
|
||||
):
|
||||
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
|
||||
"""When the user invokes 'spack external find --not-buildable', the config
|
||||
for any package where Spack finds an external version should be marked as
|
||||
not buildable.
|
||||
"""
|
||||
version = "1.foo"
|
||||
|
||||
@classmethod
|
||||
def _determine_version(cls, exe):
|
||||
return version
|
||||
|
||||
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
|
||||
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
|
||||
|
||||
cmake_path = mock_executable("cmake", output=f"echo cmake version {version}")
|
||||
os.environ["PATH"] = str(cmake_path.parent)
|
||||
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
|
||||
os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
|
||||
external("find", "--not-buildable", "cmake")
|
||||
pkgs_cfg = spack.config.get("packages")
|
||||
assert "cmake" in pkgs_cfg
|
||||
@@ -102,51 +123,37 @@ def _determine_version(cls, exe):
|
||||
["detectable"],
|
||||
[],
|
||||
[
|
||||
"builtin_mock.cmake",
|
||||
"builtin_mock.find-externals1",
|
||||
"builtin_mock.gcc",
|
||||
"builtin_mock.intel-oneapi-compilers",
|
||||
"builtin_mock.llvm",
|
||||
"builtin_mock.mpich",
|
||||
"builtin.mock.find-externals1",
|
||||
"builtin.mock.gcc",
|
||||
"builtin.mock.llvm",
|
||||
"builtin.mock.intel-oneapi-compilers",
|
||||
],
|
||||
),
|
||||
# find --all --exclude find-externals1
|
||||
(
|
||||
None,
|
||||
["detectable"],
|
||||
["builtin_mock.find-externals1"],
|
||||
[
|
||||
"builtin_mock.cmake",
|
||||
"builtin_mock.gcc",
|
||||
"builtin_mock.intel-oneapi-compilers",
|
||||
"builtin_mock.llvm",
|
||||
"builtin_mock.mpich",
|
||||
],
|
||||
["builtin.mock.find-externals1"],
|
||||
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
|
||||
),
|
||||
(
|
||||
None,
|
||||
["detectable"],
|
||||
["find-externals1"],
|
||||
[
|
||||
"builtin_mock.cmake",
|
||||
"builtin_mock.gcc",
|
||||
"builtin_mock.intel-oneapi-compilers",
|
||||
"builtin_mock.llvm",
|
||||
"builtin_mock.mpich",
|
||||
],
|
||||
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
|
||||
),
|
||||
# find hwloc (and mock hwloc is not detectable)
|
||||
(["hwloc"], ["detectable"], [], []),
|
||||
# find cmake (and cmake is not detectable)
|
||||
(["cmake"], ["detectable"], [], []),
|
||||
],
|
||||
)
|
||||
def test_package_selection(names, tags, exclude, expected):
|
||||
def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
|
||||
"""Tests various cases of selecting packages"""
|
||||
# In the mock repo we only have 'find-externals1' that is detectable
|
||||
result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
|
||||
assert set(result) == set(expected)
|
||||
|
||||
|
||||
def test_find_external_no_manifest(mutable_config, working_env, monkeypatch):
|
||||
def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files does not exist. Ensure that the command does not
|
||||
fail.
|
||||
@@ -159,7 +166,7 @@ def test_find_external_no_manifest(mutable_config, working_env, monkeypatch):
|
||||
|
||||
|
||||
def test_find_external_empty_default_manifest_dir(
|
||||
mutable_config, working_env, tmpdir, monkeypatch
|
||||
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
|
||||
):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files exists but is empty. Ensure that the command does not
|
||||
@@ -174,7 +181,7 @@ def test_find_external_empty_default_manifest_dir(
|
||||
@pytest.mark.not_on_windows("Can't chmod on Windows")
|
||||
@pytest.mark.skipif(getuid() == 0, reason="user is root")
|
||||
def test_find_external_manifest_with_bad_permissions(
|
||||
mutable_config, working_env, tmpdir, monkeypatch
|
||||
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
|
||||
):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files exists but with insufficient permissions. Check that
|
||||
@@ -194,7 +201,7 @@ def test_find_external_manifest_with_bad_permissions(
|
||||
os.chmod(test_manifest_file_path, 0o700)
|
||||
|
||||
|
||||
def test_find_external_manifest_failure(mutable_config, tmpdir, monkeypatch):
|
||||
def test_find_external_manifest_failure(mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
|
||||
"""The user runs 'spack external find'; the manifest parsing fails with
|
||||
some exception. Ensure that the command still succeeds (i.e. moves on
|
||||
to other external detection mechanisms).
|
||||
@@ -214,7 +221,7 @@ def fail():
|
||||
assert "Skipping manifest and continuing" in output
|
||||
|
||||
|
||||
def test_find_external_merge(mutable_config, tmp_path):
|
||||
def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
|
||||
"""Checks that 'spack find external' doesn't overwrite an existing spec in packages.yaml."""
|
||||
pkgs_cfg_init = {
|
||||
"find-externals1": {
|
||||
@@ -240,7 +247,7 @@ def test_find_external_merge(mutable_config, tmp_path):
|
||||
assert {"spec": "find-externals1@1.2", "prefix": "/x/y2"} in pkg_externals
|
||||
|
||||
|
||||
def test_list_detectable_packages(mutable_config):
|
||||
def test_list_detectable_packages(mutable_config, mutable_mock_repo):
|
||||
external("list")
|
||||
assert external.returncode == 0
|
||||
|
||||
@@ -286,23 +293,13 @@ def test_new_entries_are_reported_correctly(mock_executable, mutable_config, mon
|
||||
|
||||
|
||||
@pytest.mark.parametrize("command_args", [("-t", "build-tools"), ("-t", "build-tools", "cmake")])
|
||||
@pytest.mark.not_on_windows("the test uses bash scripts")
|
||||
def test_use_tags_for_detection(command_args, mock_executable, mutable_config, monkeypatch):
|
||||
versions = {"cmake": "3.19.1", "openssl": "2.8.3"}
|
||||
|
||||
@classmethod
|
||||
def _determine_version(cls, exe):
|
||||
return versions[os.path.basename(exe)]
|
||||
|
||||
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
|
||||
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
|
||||
|
||||
# Prepare an environment to detect a fake cmake
|
||||
cmake_exe = mock_executable("cmake", output=f"echo cmake version {versions['cmake']}")
|
||||
cmake_exe = mock_executable("cmake", output="echo cmake version 3.19.1")
|
||||
prefix = os.path.dirname(cmake_exe)
|
||||
monkeypatch.setenv("PATH", prefix)
|
||||
|
||||
openssl_exe = mock_executable("openssl", output=f"OpenSSL {versions['openssl']}")
|
||||
openssl_exe = mock_executable("openssl", output="OpenSSL 2.8.3")
|
||||
prefix = os.path.dirname(openssl_exe)
|
||||
monkeypatch.setenv("PATH", prefix)
|
||||
|
||||
@@ -319,16 +316,6 @@ def test_failures_in_scanning_do_not_result_in_an_error(
|
||||
mock_executable, monkeypatch, mutable_config
|
||||
):
|
||||
"""Tests that scanning paths with wrong permissions, won't cause `external find` to error."""
|
||||
versions = {"first": "3.19.1", "second": "3.23.3"}
|
||||
|
||||
@classmethod
|
||||
def _determine_version(cls, exe):
|
||||
bin_parent = os.path.dirname(exe).split(os.sep)[-2]
|
||||
return versions[bin_parent]
|
||||
|
||||
cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
|
||||
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
|
||||
|
||||
cmake_exe1 = mock_executable(
|
||||
"cmake", output="echo cmake version 3.19.1", subdir=("first", "bin")
|
||||
)
|
||||
@@ -346,30 +333,21 @@ def _determine_version(cls, exe):
|
||||
assert external.returncode == 0
|
||||
assert "The following specs have been" in output
|
||||
assert "cmake" in output
|
||||
for vers in versions.values():
|
||||
assert vers in output
|
||||
assert "3.23.3" in output
|
||||
assert "3.19.1" not in output
|
||||
|
||||
|
||||
def test_detect_virtuals(mock_executable, mutable_config, monkeypatch):
|
||||
"""Test whether external find --not-buildable sets virtuals as non-buildable (unless user
|
||||
config sets them to buildable)"""
|
||||
version = "4.0.2"
|
||||
|
||||
@classmethod
|
||||
def _determine_version(cls, exe):
|
||||
return version
|
||||
|
||||
cmake_cls = spack.repo.PATH.get_pkg_class("mpich")
|
||||
monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)
|
||||
|
||||
mpich = mock_executable("mpichversion", output=f"echo MPICH Version: {version}")
|
||||
mpich = mock_executable("mpichversion", output="echo MPICH Version: 4.0.2")
|
||||
prefix = os.path.dirname(mpich)
|
||||
external("find", "--path", prefix, "--not-buildable", "mpich")
|
||||
|
||||
# Check that mpich was correctly detected
|
||||
mpich = mutable_config.get("packages:mpich")
|
||||
assert mpich["buildable"] is False
|
||||
assert Spec(mpich["externals"][0]["spec"]).satisfies(f"mpich@{version}")
|
||||
assert Spec(mpich["externals"][0]["spec"]).satisfies("mpich@4.0.2")
|
||||
|
||||
# Check that the virtual package mpi was marked as non-buildable
|
||||
assert mutable_config.get("packages:mpi:buildable") is False
|
||||
|
||||
@@ -14,12 +14,12 @@
|
||||
import spack.cmd.find
|
||||
import spack.concretize
|
||||
import spack.environment as ev
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.store
|
||||
import spack.user_environment as uenv
|
||||
from spack.enums import InstallRecordStatus
|
||||
from spack.main import SpackCommand
|
||||
from spack.test.conftest import create_test_repo
|
||||
from spack.test.utilities import SpackCommandArgs
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
@@ -129,7 +129,7 @@ def test_tag2_tag3(parser, specs):
|
||||
@pytest.mark.db
|
||||
def test_namespaces_shown_correctly(args, with_namespace, database):
|
||||
"""Test that --namespace(s) works. Old syntax is --namespace"""
|
||||
assert ("builtin_mock.zmpi" in find(*args)) == with_namespace
|
||||
assert ("builtin.mock.zmpi" in find(*args)) == with_namespace
|
||||
|
||||
|
||||
@pytest.mark.db
|
||||
@@ -462,16 +462,89 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path, mock_p
|
||||
assert "zlib" in output
|
||||
|
||||
|
||||
# a0 d0
|
||||
# / \ / \
|
||||
# b0 c0 e0
|
||||
_pkga = (
|
||||
"a0",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class A0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("b0")
|
||||
depends_on("c0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgb = (
|
||||
"b0",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class B0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgc = (
|
||||
"c0",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class C0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
tags = ["tag0", "tag1"]
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgd = (
|
||||
"d0",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class D0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("c0")
|
||||
depends_on("e0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkge = (
|
||||
"e0",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class E0(Package):
|
||||
tags = ["tag1", "tag2"]
|
||||
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(mock_stage):
|
||||
with spack.repo.use_repositories(
|
||||
os.path.join(spack.paths.test_repos_path, "spack_repo", "find")
|
||||
) as mock_repo_path:
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
r"""
|
||||
a0 d0
|
||||
/ \ / \
|
||||
b0 c0 e0
|
||||
"""
|
||||
yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
|
||||
@@ -143,13 +143,13 @@ def test_list_count():
|
||||
|
||||
def test_list_repos():
|
||||
with spack.repo.use_repositories(
|
||||
os.path.join(spack.paths.test_repos_path, "spack_repo", "builtin_mock"),
|
||||
os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test"),
|
||||
os.path.join(spack.paths.test_repos_path, "builtin.mock"),
|
||||
os.path.join(spack.paths.test_repos_path, "builder.test"),
|
||||
):
|
||||
total_pkgs = len(list().strip().split())
|
||||
mock_pkgs = len(list("-r", "builtin_mock").strip().split())
|
||||
builder_pkgs = len(list("-r", "builder_test").strip().split())
|
||||
both_repos = len(list("-r", "builtin_mock", "-r", "builder_test").strip().split())
|
||||
mock_pkgs = len(list("-r", "builtin.mock").strip().split())
|
||||
builder_pkgs = len(list("-r", "builder.test").strip().split())
|
||||
both_repos = len(list("-r", "builtin.mock", "-r", "builder.test").strip().split())
|
||||
|
||||
assert total_pkgs > mock_pkgs > builder_pkgs
|
||||
assert both_repos == total_pkgs
|
||||
|
||||
@@ -39,9 +39,7 @@ def install(self, spec, prefix):
|
||||
def mock_pkg_git_repo(git, tmp_path_factory):
|
||||
"""Copy the builtin.mock repo and make a mutable git repo inside it."""
|
||||
root_dir = tmp_path_factory.mktemp("mock_pkg_git_repo")
|
||||
# create spack_repo subdir
|
||||
(root_dir / "spack_repo").mkdir()
|
||||
repo_dir = root_dir / "spack_repo" / "builtin_mock"
|
||||
repo_dir = root_dir / "builtin.mock"
|
||||
shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))
|
||||
|
||||
repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")
|
||||
@@ -59,25 +57,25 @@ def mock_pkg_git_repo(git, tmp_path_factory):
|
||||
git("-c", "commit.gpgsign=false", "commit", "-m", "initial mock repo commit")
|
||||
|
||||
# add commit with mockpkg-a, mockpkg-b, mockpkg-c packages
|
||||
mkdirp("mockpkg_a", "mockpkg_b", "mockpkg_c")
|
||||
with open("mockpkg_a/package.py", "w", encoding="utf-8") as f:
|
||||
mkdirp("mockpkg-a", "mockpkg-b", "mockpkg-c")
|
||||
with open("mockpkg-a/package.py", "w", encoding="utf-8") as f:
|
||||
f.write(pkg_template.format(name="PkgA"))
|
||||
with open("mockpkg_b/package.py", "w", encoding="utf-8") as f:
|
||||
with open("mockpkg-b/package.py", "w", encoding="utf-8") as f:
|
||||
f.write(pkg_template.format(name="PkgB"))
|
||||
with open("mockpkg_c/package.py", "w", encoding="utf-8") as f:
|
||||
with open("mockpkg-c/package.py", "w", encoding="utf-8") as f:
|
||||
f.write(pkg_template.format(name="PkgC"))
|
||||
git("add", "mockpkg_a", "mockpkg_b", "mockpkg_c")
|
||||
git("add", "mockpkg-a", "mockpkg-b", "mockpkg-c")
|
||||
git("-c", "commit.gpgsign=false", "commit", "-m", "add mockpkg-a, mockpkg-b, mockpkg-c")
|
||||
|
||||
# remove mockpkg-c, add mockpkg-d
|
||||
with open("mockpkg_b/package.py", "a", encoding="utf-8") as f:
|
||||
with open("mockpkg-b/package.py", "a", encoding="utf-8") as f:
|
||||
f.write("\n# change mockpkg-b")
|
||||
git("add", "mockpkg_b")
|
||||
mkdirp("mockpkg_d")
|
||||
with open("mockpkg_d/package.py", "w", encoding="utf-8") as f:
|
||||
git("add", "mockpkg-b")
|
||||
mkdirp("mockpkg-d")
|
||||
with open("mockpkg-d/package.py", "w", encoding="utf-8") as f:
|
||||
f.write(pkg_template.format(name="PkgD"))
|
||||
git("add", "mockpkg_d")
|
||||
git("rm", "-rf", "mockpkg_c")
|
||||
git("add", "mockpkg-d")
|
||||
git("rm", "-rf", "mockpkg-c")
|
||||
git(
|
||||
"-c",
|
||||
"commit.gpgsign=false",
|
||||
@@ -92,7 +90,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def mock_pkg_names():
|
||||
repo = spack.repo.PATH.get_repo("builtin_mock")
|
||||
repo = spack.repo.PATH.get_repo("builtin.mock")
|
||||
|
||||
# Be sure to include virtual packages since packages with stand-alone
|
||||
# tests may inherit additional tests from the virtuals they provide,
|
||||
@@ -113,28 +111,27 @@ def split(output):
|
||||
pkg = spack.main.SpackCommand("pkg")
|
||||
|
||||
|
||||
@pytest.mark.requires_builtin("builtin repository path must exist")
|
||||
def test_builtin_repo():
|
||||
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin")
|
||||
|
||||
|
||||
def test_mock_builtin_repo(mock_packages):
|
||||
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin_mock")
|
||||
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")
|
||||
|
||||
|
||||
def test_pkg_add(git, mock_pkg_git_repo):
|
||||
with working_dir(mock_pkg_git_repo):
|
||||
mkdirp("mockpkg_e")
|
||||
with open("mockpkg_e/package.py", "w", encoding="utf-8") as f:
|
||||
mkdirp("mockpkg-e")
|
||||
with open("mockpkg-e/package.py", "w", encoding="utf-8") as f:
|
||||
f.write(pkg_template.format(name="PkgE"))
|
||||
|
||||
pkg("add", "mockpkg-e")
|
||||
|
||||
with working_dir(mock_pkg_git_repo):
|
||||
try:
|
||||
assert "A mockpkg_e/package.py" in git("status", "--short", output=str)
|
||||
assert "A mockpkg-e/package.py" in git("status", "--short", output=str)
|
||||
finally:
|
||||
shutil.rmtree("mockpkg_e")
|
||||
shutil.rmtree("mockpkg-e")
|
||||
# Removing a package mid-run disrupts Spack's caching
|
||||
if spack.repo.PATH.repos[0]._fast_package_checker:
|
||||
spack.repo.PATH.repos[0]._fast_package_checker.invalidate()
|
||||
@@ -307,56 +304,10 @@ def test_pkg_hash(mock_packages):
|
||||
assert len(output) == 1 and all(len(elt) == 32 for elt in output)
|
||||
|
||||
|
||||
group_args = [
|
||||
"/path/one.py", # 12
|
||||
"/path/two.py", # 12
|
||||
"/path/three.py", # 14
|
||||
"/path/four.py", # 13
|
||||
"/path/five.py", # 13
|
||||
"/path/six.py", # 12
|
||||
"/path/seven.py", # 14
|
||||
"/path/eight.py", # 14
|
||||
"/path/nine.py", # 13
|
||||
"/path/ten.py", # 12
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
["max_group_size", "max_group_length", "lengths", "error"],
|
||||
[
|
||||
(3, 1, None, ValueError),
|
||||
(3, 13, None, ValueError),
|
||||
(3, 25, [2, 1, 1, 1, 1, 1, 1, 1, 1], None),
|
||||
(3, 26, [2, 1, 1, 2, 1, 1, 2], None),
|
||||
(3, 40, [3, 3, 2, 2], None),
|
||||
(3, 43, [3, 3, 3, 1], None),
|
||||
(4, 54, [4, 3, 3], None),
|
||||
(4, 56, [4, 4, 2], None),
|
||||
],
|
||||
)
|
||||
def test_group_arguments(mock_packages, max_group_size, max_group_length, lengths, error):
|
||||
generator = spack.cmd.group_arguments(
|
||||
group_args, max_group_size=max_group_size, max_group_length=max_group_length
|
||||
)
|
||||
|
||||
# just check that error cases raise
|
||||
if error:
|
||||
with pytest.raises(ValueError):
|
||||
list(generator)
|
||||
return
|
||||
|
||||
groups = list(generator)
|
||||
assert sum(groups, []) == group_args
|
||||
assert [len(group) for group in groups] == lengths
|
||||
assert all(
|
||||
sum(len(elt) for elt in group) + (len(group) - 1) <= max_group_length for group in groups
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.skipif(not spack.cmd.pkg.get_grep(), reason="grep is not installed")
|
||||
def test_pkg_grep(mock_packages, capfd):
|
||||
# only splice-* mock packages have the string "splice" in them
|
||||
pkg("grep", "-l", "splice")
|
||||
pkg("grep", "-l", "splice", output=str)
|
||||
output, _ = capfd.readouterr()
|
||||
assert output.strip() == "\n".join(
|
||||
spack.repo.PATH.get_pkg_class(name).module.__file__
|
||||
@@ -376,14 +327,12 @@ def test_pkg_grep(mock_packages, capfd):
|
||||
]
|
||||
)
|
||||
|
||||
# ensure that this string isn't found
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
pkg("grep", "abcdefghijklmnopqrstuvwxyz")
|
||||
# ensure that this string isn't fouhnd
|
||||
pkg("grep", "abcdefghijklmnopqrstuvwxyz", output=str, fail_on_error=False)
|
||||
assert pkg.returncode == 1
|
||||
output, _ = capfd.readouterr()
|
||||
assert output.strip() == ""
|
||||
|
||||
# ensure that we return > 1 for an error
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
pkg("grep", "--foobarbaz-not-an-option")
|
||||
pkg("grep", "--foobarbaz-not-an-option", output=str, fail_on_error=False)
|
||||
assert pkg.returncode == 2
|
||||
|
||||
@@ -1,21 +1,15 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import io
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.repo
|
||||
import spack.repo_migrate
|
||||
from spack.main import SpackCommand
|
||||
from spack.util.executable import Executable
|
||||
|
||||
repo = spack.main.SpackCommand("repo")
|
||||
env = SpackCommand("env")
|
||||
@@ -74,101 +68,3 @@ def test_env_repo_path_vars_substitution(
|
||||
with ev.read("test") as newenv:
|
||||
repos_specs = spack.config.get("repos", default={}, scope=newenv.scope_name)
|
||||
assert current_dir in repos_specs
|
||||
|
||||
|
||||
OLD_7ZIP = b"""\
|
||||
# some comment
|
||||
|
||||
from spack.package import *
|
||||
|
||||
class _7zip(Package):
|
||||
pass
|
||||
"""
|
||||
|
||||
NEW_7ZIP = b"""\
|
||||
# some comment
|
||||
|
||||
from spack_repo.builtin.build_systems.generic import Package
|
||||
from spack.package import *
|
||||
|
||||
class _7zip(Package):
|
||||
pass
|
||||
"""
|
||||
|
||||
OLD_NUMPY = b"""\
|
||||
# some comment
|
||||
|
||||
from spack.package import *
|
||||
|
||||
class PyNumpy(CMakePackage):
|
||||
generator("ninja")
|
||||
"""
|
||||
|
||||
NEW_NUMPY = b"""\
|
||||
# some comment
|
||||
|
||||
from spack_repo.builtin.build_systems.cmake import CMakePackage, generator
|
||||
from spack.package import *
|
||||
|
||||
class PyNumpy(CMakePackage):
|
||||
generator("ninja")
|
||||
"""
|
||||
|
||||
|
||||
def test_repo_migrate(tmp_path: pathlib.Path, config):
|
||||
old_root, _ = spack.repo.create_repo(str(tmp_path), "org.repo", package_api=(1, 0))
|
||||
pkgs_path = pathlib.Path(spack.repo.from_path(old_root).packages_path)
|
||||
new_root = pathlib.Path(old_root) / "spack_repo" / "org" / "repo"
|
||||
|
||||
pkg_7zip_old = pkgs_path / "7zip" / "package.py"
|
||||
pkg_numpy_old = pkgs_path / "py-numpy" / "package.py"
|
||||
pkg_py_7zip_new = new_root / "packages" / "_7zip" / "package.py"
|
||||
pkg_py_numpy_new = new_root / "packages" / "py_numpy" / "package.py"
|
||||
|
||||
pkg_7zip_old.parent.mkdir(parents=True)
|
||||
pkg_numpy_old.parent.mkdir(parents=True)
|
||||
|
||||
pkg_7zip_old.write_bytes(OLD_7ZIP)
|
||||
pkg_numpy_old.write_bytes(OLD_NUMPY)
|
||||
|
||||
repo("migrate", "--fix", old_root)
|
||||
|
||||
# old files are not touched since they are moved
|
||||
assert pkg_7zip_old.read_bytes() == OLD_7ZIP
|
||||
assert pkg_numpy_old.read_bytes() == OLD_NUMPY
|
||||
|
||||
# new files are created and have updated contents
|
||||
assert pkg_py_7zip_new.read_bytes() == NEW_7ZIP
|
||||
assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Known failure on windows")
|
||||
def test_migrate_diff(git: Executable, tmp_path: pathlib.Path):
|
||||
root, _ = spack.repo.create_repo(str(tmp_path), "foo", package_api=(2, 0))
|
||||
r = pathlib.Path(root)
|
||||
pkg_7zip = r / "packages" / "_7zip" / "package.py"
|
||||
pkg_py_numpy_new = r / "packages" / "py_numpy" / "package.py"
|
||||
pkg_broken = r / "packages" / "broken" / "package.py"
|
||||
|
||||
pkg_7zip.parent.mkdir(parents=True)
|
||||
pkg_py_numpy_new.parent.mkdir(parents=True)
|
||||
pkg_broken.parent.mkdir(parents=True)
|
||||
pkg_7zip.write_bytes(OLD_7ZIP)
|
||||
pkg_py_numpy_new.write_bytes(OLD_NUMPY)
|
||||
pkg_broken.write_bytes(b"syntax(error")
|
||||
|
||||
stderr = io.StringIO()
|
||||
|
||||
with open(tmp_path / "imports.patch", "w", encoding="utf-8") as stdout:
|
||||
spack.repo_migrate.migrate_v2_imports(
|
||||
str(r / "packages"), str(r), fix=False, out=stdout, err=stderr
|
||||
)
|
||||
|
||||
assert f"Skipping {pkg_broken}" in stderr.getvalue()
|
||||
|
||||
# apply the patch and verify the changes
|
||||
with working_dir(str(r)):
|
||||
git("apply", str(tmp_path / "imports.patch"))
|
||||
|
||||
assert pkg_7zip.read_bytes() == NEW_7ZIP
|
||||
assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY
|
||||
|
||||
@@ -48,13 +48,11 @@ def test_resource_list(mock_packages, capfd):
|
||||
assert "path:" in out
|
||||
|
||||
assert (
|
||||
os.path.join(
|
||||
"spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
|
||||
)
|
||||
os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
|
||||
in out
|
||||
)
|
||||
assert "applies to: builtin_mock.libelf" in out
|
||||
assert "patched by: builtin_mock.patch-a-dependency" in out
|
||||
assert "applies to: builtin.mock.libelf" in out
|
||||
assert "patched by: builtin.mock.patch-a-dependency" in out
|
||||
|
||||
|
||||
def test_resource_list_only_hashes(mock_packages, capfd):
|
||||
@@ -76,12 +74,10 @@ def test_resource_show(mock_packages, capfd):
|
||||
|
||||
assert out.startswith(test_hash)
|
||||
assert (
|
||||
os.path.join(
|
||||
"spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
|
||||
)
|
||||
os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
|
||||
in out
|
||||
)
|
||||
assert "applies to: builtin_mock.libelf" in out
|
||||
assert "patched by: builtin_mock.patch-a-dependency" in out
|
||||
assert "applies to: builtin.mock.libelf" in out
|
||||
assert "patched by: builtin.mock.patch-a-dependency" in out
|
||||
|
||||
assert len(out.strip().split("\n")) == 4
|
||||
|
||||
@@ -241,14 +241,14 @@ def test_external_root(external_style_root, capfd):
|
||||
assert "%s Imports are incorrectly sorted" % str(py_file) in output
|
||||
|
||||
# mypy error
|
||||
assert 'lib/spack/spack/dummy.py:47: error: Name "version" is not defined' in output
|
||||
assert 'lib/spack/spack/dummy.py:9: error: Name "Package" is not defined' in output
|
||||
|
||||
# black error
|
||||
assert "--- lib/spack/spack/dummy.py" in output
|
||||
assert "+++ lib/spack/spack/dummy.py" in output
|
||||
|
||||
# flake8 error
|
||||
assert "lib/spack/spack/dummy.py:8: [F401] 'os' imported but unused" in output
|
||||
assert "lib/spack/spack/dummy.py:6: [F401] 'os' imported but unused" in output
|
||||
|
||||
|
||||
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
|
||||
@@ -311,10 +311,8 @@ def test_run_import_check(tmp_path: pathlib.Path):
|
||||
import spack.repo
|
||||
import spack.repo_utils
|
||||
|
||||
from spack_repo.builtin_mock.build_systems import autotools
|
||||
|
||||
# this comment about spack.error should not be removed
|
||||
class Example(autotools.AutotoolsPackage):
|
||||
class Example(spack.build_systems.autotools.AutotoolsPackage):
|
||||
"""this is a docstring referencing unused spack.error.SpackError, which is fine"""
|
||||
pass
|
||||
|
||||
@@ -341,6 +339,7 @@ def foo(config: "spack.error.SpackError"):
|
||||
assert "issues.py: redundant import: spack.repo" in output
|
||||
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
|
||||
assert "issues.py: missing import: spack" in output # used by spack.__version__
|
||||
assert "issues.py: missing import: spack.build_systems.autotools" in output
|
||||
assert "issues.py: missing import: spack.util.executable" in output
|
||||
assert "issues.py: missing import: spack.error" not in output # not directly used
|
||||
assert exit_code == 1
|
||||
@@ -360,6 +359,7 @@ def foo(config: "spack.error.SpackError"):
|
||||
assert exit_code == 1
|
||||
assert "issues.py: redundant import: spack.cmd" in output
|
||||
assert "issues.py: missing import: spack" in output
|
||||
assert "issues.py: missing import: spack.build_systems.autotools" in output
|
||||
assert "issues.py: missing import: spack.util.executable" in output
|
||||
|
||||
# after fix a second fix is idempotent
|
||||
@@ -380,6 +380,7 @@ def foo(config: "spack.error.SpackError"):
|
||||
new_contents = file.read_text()
|
||||
assert "import spack.cmd" not in new_contents
|
||||
assert "import spack\n" in new_contents
|
||||
assert "import spack.build_systems.autotools\n" in new_contents
|
||||
assert "import spack.util.executable\n" in new_contents
|
||||
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ def test_remote_versions_only():
|
||||
@pytest.mark.usefixtures("mock_packages")
|
||||
def test_new_versions_only(monkeypatch):
|
||||
"""Test a package for which new versions should be available."""
|
||||
from spack_repo.builtin_mock.packages.brillig.package import Brillig # type: ignore[import]
|
||||
from spack.pkg.builtin.mock.brillig import Brillig # type: ignore[import]
|
||||
|
||||
def mock_fetch_remote_versions(*args, **kwargs):
|
||||
mock_remote_versions = {
|
||||
|
||||
@@ -6,8 +6,6 @@
|
||||
|
||||
from spack.compilers.config import CompilerFactory
|
||||
|
||||
pytestmark = [pytest.mark.usefixtures("config", "mock_packages")]
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_compiler(mock_executable):
|
||||
@@ -57,7 +55,7 @@ def test_compiler_conversion_with_flags(mock_compiler):
|
||||
assert compiler_spec.extra_attributes["flags"]["cxxflags"] == "-O2 -g"
|
||||
|
||||
|
||||
def test_compiler_conversion_with_environment(mock_compiler):
|
||||
def tests_compiler_conversion_with_environment(mock_compiler):
|
||||
"""Tests that custom environment modifications are converted appropriately
|
||||
for external compilers
|
||||
"""
|
||||
@@ -69,7 +67,7 @@ def test_compiler_conversion_with_environment(mock_compiler):
|
||||
assert compiler_spec.extra_attributes["environment"] == mods
|
||||
|
||||
|
||||
def test_compiler_conversion_extra_rpaths(mock_compiler):
|
||||
def tests_compiler_conversion_extra_rpaths(mock_compiler):
|
||||
"""Tests that extra rpaths are converted appropriately for external compilers"""
|
||||
mock_compiler["extra_rpaths"] = ["/foo/bar"]
|
||||
compiler_spec = CompilerFactory.from_legacy_yaml(mock_compiler)[0]
|
||||
@@ -78,7 +76,7 @@ def test_compiler_conversion_extra_rpaths(mock_compiler):
|
||||
assert compiler_spec.extra_attributes["extra_rpaths"] == ["/foo/bar"]
|
||||
|
||||
|
||||
def test_compiler_conversion_modules(mock_compiler):
|
||||
def tests_compiler_conversion_modules(mock_compiler):
|
||||
"""Tests that modules are converted appropriately for external compilers"""
|
||||
modules = ["foo/4.1.2", "bar/5.1.4"]
|
||||
mock_compiler["modules"] = modules
|
||||
@@ -88,7 +86,7 @@ def test_compiler_conversion_modules(mock_compiler):
|
||||
|
||||
|
||||
@pytest.mark.regression("49717")
|
||||
def test_compiler_conversion_corrupted_paths(mock_compiler):
|
||||
def tests_compiler_conversion_corrupted_paths(mock_compiler):
|
||||
"""Tests that compiler entries with corrupted path do not raise"""
|
||||
mock_compiler["paths"] = {"cc": "gcc", "cxx": "g++", "fc": "gfortran", "f77": "gfortran"}
|
||||
# Test this call doesn't raise
|
||||
|
||||
@@ -28,14 +28,11 @@ def call_compiler(exe, *args, **kwargs):
|
||||
@pytest.fixture()
|
||||
def mock_gcc(config):
|
||||
compilers = spack.compilers.config.all_compilers_from(configuration=config)
|
||||
assert compilers, "No compilers available"
|
||||
|
||||
compilers.sort(key=lambda x: (x.name == "gcc", x.version))
|
||||
# Deepcopy is used to avoid more boilerplate when changing the "extra_attributes"
|
||||
return copy.deepcopy(compilers[-1])
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_packages")
|
||||
class TestCompilerPropertyDetector:
|
||||
@pytest.mark.parametrize(
|
||||
"language,flagname",
|
||||
|
||||
@@ -29,7 +29,7 @@ def _concretize_with_reuse(*, root_str, reused_str):
|
||||
|
||||
@pytest.fixture
|
||||
def runtime_repo(mutable_config):
|
||||
repo = os.path.join(spack.paths.test_repos_path, "spack_repo", "compiler_runtime_test")
|
||||
repo = os.path.join(spack.paths.test_repos_path, "compiler_runtime.test")
|
||||
with spack.repo.use_repositories(repo) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
|
||||
@@ -0,0 +1,67 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pytest
|
||||
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.spec
|
||||
|
||||
|
||||
@pytest.mark.parametrize("holds,mpi", [(True, "zmpi"), (True, "mpich"), (False, "mpich")])
|
||||
def test_conditional_deps(holds, mpi, config, mock_packages):
|
||||
sigil = "+" if holds else "~"
|
||||
request = f"hdf5{sigil}mpi ^[when='^mpi' virtuals=mpi]{mpi}"
|
||||
concrete = spack.concretize.concretize_one(request)
|
||||
|
||||
assert (mpi in concrete) == holds
|
||||
assert ("mpi" in concrete) == holds
|
||||
|
||||
|
||||
@pytest.mark.parametrize("c", [True, False])
|
||||
@pytest.mark.parametrize("cxx", [True, False])
|
||||
@pytest.mark.parametrize("fortran", [True, False])
|
||||
def test_conditional_compilers(c, cxx, fortran, mutable_config, mock_packages):
|
||||
# Configure two gcc compilers that could be concretized to
|
||||
# We will confirm concretization matches the less preferred one
|
||||
extra_attributes_block = {
|
||||
"compilers": {"c": "/path/to/gcc", "cxx": "/path/to/g++", "fortran": "/path/to/fortran"}
|
||||
}
|
||||
spack.config.CONFIG.set(
|
||||
"packages:gcc:externals::",
|
||||
[
|
||||
{
|
||||
"spec": "gcc@12.3.1 languages=c,c++,fortran",
|
||||
"prefix": "/path",
|
||||
"extra_attributes": extra_attributes_block,
|
||||
},
|
||||
{
|
||||
"spec": "gcc@10.3.1 languages=c,c++,fortran",
|
||||
"prefix": "/path",
|
||||
"extra_attributes": extra_attributes_block,
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
# Abstract spec parametrized to depend/not on c/cxx/fortran
|
||||
# and with conditional dependencies for each on the less preferred gcc
|
||||
abstract = spack.spec.Spec("conditional-languages")
|
||||
abstract.constrain(f"c={c}")
|
||||
abstract.constrain(f"cxx={cxx}")
|
||||
abstract.constrain(f"fortran={fortran}")
|
||||
|
||||
preferred_gcc = spack.concretize.concretize_one(abstract)
|
||||
abstract.constrain(
|
||||
"^[when='%c' virtuals=c]gcc@10.3.1 "
|
||||
"^[when='%cxx' virtuals=cxx]gcc@10.3.1 "
|
||||
"^[when='%fortran' virtuals=fortran]gcc@10.3.1"
|
||||
)
|
||||
concrete = spack.concretize.concretize_one(abstract)
|
||||
|
||||
# We should get the dependency we specified for each language we enabled
|
||||
assert concrete.satisfies("%[virtuals=c]gcc@10.3.1") == c
|
||||
assert concrete.satisfies("%[virtuals=cxx]gcc@10.3.1") == cxx
|
||||
assert concrete.satisfies("%[virtuals=fortran]gcc@10.3.1") == fortran
|
||||
|
||||
# The only time the two concrete specs are the same is if we don't use gcc at all
|
||||
assert (concrete == preferred_gcc) == (not any((c, cxx, fortran)))
|
||||
@@ -2,7 +2,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import sys
|
||||
|
||||
@@ -80,7 +79,7 @@ def binary_compatibility(monkeypatch, request):
|
||||
return
|
||||
|
||||
if "mock_packages" not in request.fixturenames:
|
||||
# Only builtin_mock has a mock glibc package
|
||||
# Only builtin.mock has a mock glibc package
|
||||
return
|
||||
|
||||
if "database" in request.fixturenames or "mutable_database" in request.fixturenames:
|
||||
@@ -171,12 +170,18 @@ def reverser(pkg_name):
|
||||
|
||||
@pytest.fixture()
|
||||
def repo_with_changing_recipe(tmp_path_factory, mutable_mock_repo):
|
||||
repos_dir: pathlib.Path = tmp_path_factory.mktemp("repos_dir")
|
||||
root, _ = spack.repo.create_repo(str(repos_dir), "changing")
|
||||
packages_dir = pathlib.Path(root, "packages")
|
||||
repo_namespace = "changing"
|
||||
repo_dir = tmp_path_factory.mktemp(repo_namespace)
|
||||
|
||||
(repo_dir / "repo.yaml").write_text(
|
||||
"""
|
||||
repo:
|
||||
namespace: changing
|
||||
"""
|
||||
)
|
||||
|
||||
packages_dir = repo_dir / "packages"
|
||||
root_pkg_str = """
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
from spack.package import *
|
||||
|
||||
class Root(Package):
|
||||
@@ -194,7 +199,6 @@ class Root(Package):
|
||||
package_py.write_text(root_pkg_str)
|
||||
|
||||
middle_pkg_str = """
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
from spack.package import *
|
||||
|
||||
class Middle(Package):
|
||||
@@ -209,7 +213,6 @@ class Middle(Package):
|
||||
package_py.write_text(middle_pkg_str)
|
||||
|
||||
changing_template = """
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
from spack.package import *
|
||||
|
||||
class Changing(Package):
|
||||
@@ -232,7 +235,7 @@ class Changing(Package):
|
||||
{% endif %}
|
||||
"""
|
||||
|
||||
with spack.repo.use_repositories(root, override=False) as repos:
|
||||
with spack.repo.use_repositories(str(repo_dir), override=False) as repository:
|
||||
|
||||
class _ChangingPackage:
|
||||
default_context = [
|
||||
@@ -241,22 +244,27 @@ class _ChangingPackage:
|
||||
("add_variant", False),
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, repo_directory):
|
||||
self.repo_dir = repo_directory
|
||||
cache_dir = tmp_path_factory.mktemp("cache")
|
||||
self.repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
|
||||
self.repo = spack.repo.Repo(root, cache=self.repo_cache)
|
||||
self.repo = spack.repo.Repo(str(repo_directory), cache=self.repo_cache)
|
||||
|
||||
def change(self, changes=None):
|
||||
changes = changes or {}
|
||||
context = dict(self.default_context)
|
||||
context.update(changes)
|
||||
# Remove the repo object and delete Python modules
|
||||
repos.remove(self.repo)
|
||||
repository.remove(self.repo)
|
||||
# TODO: this mocks a change in the recipe that should happen in a
|
||||
# TODO: different process space. Leaving this comment as a hint
|
||||
# TODO: in case tests using this fixture start failing.
|
||||
for module in [x for x in sys.modules if x.startswith("spack_repo.changing")]:
|
||||
del sys.modules[module]
|
||||
if sys.modules.get("spack.pkg.changing.changing"):
|
||||
del sys.modules["spack.pkg.changing.changing"]
|
||||
if sys.modules.get("spack.pkg.changing.root"):
|
||||
del sys.modules["spack.pkg.changing.root"]
|
||||
if sys.modules.get("spack.pkg.changing"):
|
||||
del sys.modules["spack.pkg.changing"]
|
||||
|
||||
# Change the recipe
|
||||
t = _vendoring.jinja2.Template(changing_template)
|
||||
@@ -266,10 +274,10 @@ def change(self, changes=None):
|
||||
package_py.write_text(changing_pkg_str)
|
||||
|
||||
# Re-add the repository
|
||||
self.repo = spack.repo.Repo(root, cache=self.repo_cache)
|
||||
repos.put_first(self.repo)
|
||||
self.repo = spack.repo.Repo(str(self.repo_dir), cache=self.repo_cache)
|
||||
repository.put_first(self.repo)
|
||||
|
||||
_changing_pkg = _ChangingPackage()
|
||||
_changing_pkg = _ChangingPackage(repo_dir)
|
||||
_changing_pkg.change(
|
||||
{"delete_version": False, "delete_variant": False, "add_variant": False}
|
||||
)
|
||||
@@ -366,11 +374,11 @@ def test_provides_handles_multiple_providers_of_same_version(self):
|
||||
# Note that providers are repo-specific, so we don't misinterpret
|
||||
# providers, but vdeps are not namespace-specific, so we can
|
||||
# associate vdeps across repos.
|
||||
assert Spec("builtin_mock.multi-provider-mpi@1.10.3") in providers
|
||||
assert Spec("builtin_mock.multi-provider-mpi@1.10.2") in providers
|
||||
assert Spec("builtin_mock.multi-provider-mpi@1.10.1") in providers
|
||||
assert Spec("builtin_mock.multi-provider-mpi@1.10.0") in providers
|
||||
assert Spec("builtin_mock.multi-provider-mpi@1.8.8") in providers
|
||||
assert Spec("builtin.mock.multi-provider-mpi@1.10.3") in providers
|
||||
assert Spec("builtin.mock.multi-provider-mpi@1.10.2") in providers
|
||||
assert Spec("builtin.mock.multi-provider-mpi@1.10.1") in providers
|
||||
assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
|
||||
assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers
|
||||
|
||||
def test_different_compilers_get_different_flags(
|
||||
self, mutable_config, clang12_with_flags, gcc11_with_flags
|
||||
@@ -751,7 +759,7 @@ def test_virtual_is_fully_expanded_for_mpileaks(self):
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,expected,not_expected",
|
||||
[
|
||||
# clang (llvm~flang) only provides C, and C++ compilers, while gcc has also fortran
|
||||
# clang only provides C, and C++ compilers, while gcc has also fortran
|
||||
#
|
||||
# If we ask mpileaks%clang, then %gcc must be used for fortran, and since
|
||||
# %gcc is preferred to clang in config, it will be used for most nodes
|
||||
@@ -1708,12 +1716,12 @@ def test_reuse_with_unknown_namespace_dont_raise(
|
||||
):
|
||||
with spack.repo.use_repositories(mock_custom_repository, override=False):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace != "builtin_mock"
|
||||
assert s.namespace != "builtin.mock"
|
||||
PackageInstaller([s.package], fake=True, explicit=True).install()
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace == "builtin_mock"
|
||||
assert s.namespace == "builtin.mock"
|
||||
|
||||
@pytest.mark.regression("45538")
|
||||
def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monkeypatch):
|
||||
@@ -1744,7 +1752,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
|
||||
repos.repos[0]._pkg_checker.invalidate()
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace == "builtin_mock"
|
||||
assert s.namespace == "builtin.mock"
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"specs,checks",
|
||||
@@ -2321,10 +2329,10 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
|
||||
"spec_str,expected_namespaces",
|
||||
[
|
||||
# Single node with fully qualified namespace
|
||||
("builtin_mock.gmake", {"gmake": "builtin_mock"}),
|
||||
("builtin.mock.gmake", {"gmake": "builtin.mock"}),
|
||||
# Dependency with fully qualified namespace
|
||||
("hdf5 ^builtin_mock.gmake", {"gmake": "builtin_mock", "hdf5": "duplicates_test"}),
|
||||
("hdf5 ^gmake", {"gmake": "duplicates_test", "hdf5": "duplicates_test"}),
|
||||
("hdf5 ^builtin.mock.gmake", {"gmake": "builtin.mock", "hdf5": "duplicates.test"}),
|
||||
("hdf5 ^gmake", {"gmake": "duplicates.test", "hdf5": "duplicates.test"}),
|
||||
],
|
||||
)
|
||||
def test_select_lower_priority_package_from_repository_stack(
|
||||
@@ -2333,10 +2341,8 @@ def test_select_lower_priority_package_from_repository_stack(
|
||||
"""Tests that a user can explicitly select a lower priority, fully qualified dependency
|
||||
from cli.
|
||||
"""
|
||||
# 'builtin_mock" and "duplicates_test" share a 'gmake' package
|
||||
additional_repo = os.path.join(
|
||||
spack.paths.test_repos_path, "spack_repo", "duplicates_test"
|
||||
)
|
||||
# 'builtin.mock" and "duplicates.test" share a 'gmake' package
|
||||
additional_repo = os.path.join(spack.paths.test_repos_path, "duplicates.test")
|
||||
with spack.repo.use_repositories(additional_repo, override=False):
|
||||
s = spack.concretize.concretize_one(spec_str)
|
||||
|
||||
@@ -2580,7 +2586,7 @@ def test_correct_external_is_selected_from_packages_yaml(self, mutable_config):
|
||||
|
||||
@pytest.fixture()
|
||||
def duplicates_test_repository():
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "duplicates_test")
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "duplicates.test")
|
||||
with spack.repo.use_repositories(repository_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
@@ -2815,7 +2821,7 @@ def test_adding_specs(self, input_specs, default_mock_concretization):
|
||||
|
||||
@pytest.fixture()
|
||||
def edges_test_repository():
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "edges_test")
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "edges.test")
|
||||
with spack.repo.use_repositories(repository_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
|
||||
@@ -81,8 +81,10 @@ def test_internal_error_handling_formatting(tmp_path):
|
||||
assert "the following specs were not solved:\n - baz+z\n" in output
|
||||
assert (
|
||||
"the following specs were concretized, but do not satisfy the input:\n"
|
||||
" - foo+x\n"
|
||||
" - bar+y\n"
|
||||
" - input: foo+x\n"
|
||||
" output: foo@=1.0~x\n"
|
||||
" - input: bar+y\n"
|
||||
" output: x@=1.0~y"
|
||||
) in output
|
||||
|
||||
files = {f.name: str(f) for f in tmp_path.glob("spack-asp-*/*.json")}
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(mutable_config, monkeypatch, mock_stage):
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "spack_repo" / "flags_test"
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "flags.test"
|
||||
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.solver.asp
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
@@ -28,7 +29,7 @@ def update_packages_config(conf_str):
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(mutable_config, monkeypatch, mock_stage):
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "spack_repo" / "requirements_test"
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "requirements.test"
|
||||
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
@@ -766,21 +767,21 @@ def test_skip_requirement_when_default_requirement_condition_cannot_be_met(
|
||||
|
||||
def test_requires_directive(mock_packages, config):
|
||||
# This package requires either clang or gcc
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc")
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc")
|
||||
assert s.satisfies("%gcc")
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc %gcc")
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc %gcc")
|
||||
assert s.satisfies("%gcc")
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc %clang")
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc %clang")
|
||||
# Test both the real package (llvm) and its alias (clang)
|
||||
assert s.satisfies("%llvm") and s.satisfies("%clang")
|
||||
|
||||
# This package can only be compiled with clang
|
||||
s = spack.concretize.concretize_one("requires-clang")
|
||||
s = spack.concretize.concretize_one("requires_clang")
|
||||
assert s.satisfies("%llvm")
|
||||
s = spack.concretize.concretize_one("requires-clang %clang")
|
||||
s = spack.concretize.concretize_one("requires_clang %clang")
|
||||
assert s.satisfies("%llvm")
|
||||
with pytest.raises(spack.error.SpackError, match="can only be compiled with Clang"):
|
||||
spack.concretize.concretize_one("requires-clang %gcc")
|
||||
spack.concretize.concretize_one("requires_clang %gcc")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -1301,3 +1302,52 @@ def test_requirements_on_compilers_and_reuse(
|
||||
assert is_pkgb_reused == expected_reuse
|
||||
for c in expected_contraints:
|
||||
assert pkga.satisfies(c)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"abstract,req_is_noop",
|
||||
[
|
||||
("hdf5+mpi", False),
|
||||
("hdf5~mpi", True),
|
||||
("conditional-languages+c", False),
|
||||
("conditional-languages+cxx", False),
|
||||
("conditional-languages+fortran", False),
|
||||
("conditional-languages~c~cxx~fortran", True),
|
||||
],
|
||||
)
|
||||
def test_requirements_conditional_deps(abstract, req_is_noop, mutable_config, mock_packages):
|
||||
required_spec = (
|
||||
"%[when='^c' virtuals=c]gcc@10.3.1 "
|
||||
"%[when='^cxx' virtuals=cxx]gcc@10.3.1 "
|
||||
"%[when='^fortran' virtuals=fortran]gcc@10.3.1 "
|
||||
"^[when='^mpi' virtuals=mpi]zmpi"
|
||||
)
|
||||
abstract = spack.spec.Spec(abstract)
|
||||
|
||||
# Configure two gcc compilers that could be concretized to
|
||||
# We will confirm concretization matches the less preferred one
|
||||
extra_attributes_block = {
|
||||
"compilers": {"c": "/path/to/gcc", "cxx": "/path/to/g++", "fortran": "/path/to/fortran"}
|
||||
}
|
||||
spack.config.CONFIG.set(
|
||||
"packages:gcc:externals::",
|
||||
[
|
||||
{
|
||||
"spec": "gcc@12.3.1 languages=c,c++,fortran",
|
||||
"prefix": "/path",
|
||||
"extra_attributes": extra_attributes_block,
|
||||
},
|
||||
{
|
||||
"spec": "gcc@10.3.1 languages=c,c++,fortran",
|
||||
"prefix": "/path",
|
||||
"extra_attributes": extra_attributes_block,
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
no_requirements = spack.concretize.concretize_one(abstract)
|
||||
spack.config.CONFIG.set(f"packages:{abstract.name}", {"require": required_spec})
|
||||
requirements = spack.concretize.concretize_one(abstract)
|
||||
|
||||
assert requirements.satisfies(required_spec)
|
||||
assert (requirements == no_requirements) == req_is_noop # show the reqs change concretization
|
||||
|
||||
@@ -654,7 +654,7 @@ def mock_pkg_install(monkeypatch):
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mock_packages(mock_repo_path, mock_pkg_install, request):
|
||||
"""Use the 'builtin_mock' repository instead of 'builtin'"""
|
||||
"""Use the 'builtin.mock' repository instead of 'builtin'"""
|
||||
ensure_configuration_fixture_run_before(request)
|
||||
with spack.repo.use_repositories(mock_repo_path) as mock_repo:
|
||||
yield mock_repo
|
||||
@@ -887,7 +887,6 @@ def no_packages_yaml(mutable_config):
|
||||
compilers_yaml = local_config.get_section_filename("packages")
|
||||
if os.path.exists(compilers_yaml):
|
||||
os.remove(compilers_yaml)
|
||||
mutable_config.clear_caches()
|
||||
return mutable_config
|
||||
|
||||
|
||||
@@ -1434,7 +1433,7 @@ def mock_git_repository(git, tmpdir_factory):
|
||||
of these refers to a repository with a single commit.
|
||||
|
||||
c0, c1, and c2 include information to define explicit versions in the
|
||||
associated builtin_mock package 'git-test'. c3 is a commit in the
|
||||
associated builtin.mock package 'git-test'. c3 is a commit in the
|
||||
repository but does not have an associated explicit package version.
|
||||
"""
|
||||
suburls = []
|
||||
@@ -2055,7 +2054,7 @@ def shell_as(shell):
|
||||
@pytest.fixture()
|
||||
def nullify_globals(request, monkeypatch):
|
||||
ensure_configuration_fixture_run_before(request)
|
||||
monkeypatch.setattr(spack.config, "CONFIG", None)
|
||||
monkeypatch.setattr(spack.config, "CONFIG", {}) # So basic get operations do not throw
|
||||
monkeypatch.setattr(spack.caches, "MISC_CACHE", None)
|
||||
monkeypatch.setattr(spack.caches, "FETCH_CACHE", None)
|
||||
monkeypatch.setattr(spack.repo, "PATH", None)
|
||||
@@ -2073,11 +2072,6 @@ def pytest_runtest_setup(item):
|
||||
if only_windows_marker and sys.platform != "win32":
|
||||
pytest.skip(*only_windows_marker.args)
|
||||
|
||||
# Skip tests marked "requires_builtin" if builtin repo is required
|
||||
requires_builtin_marker = item.get_closest_marker(name="requires_builtin")
|
||||
if requires_builtin_marker and not os.path.exists(spack.paths.packages_path):
|
||||
pytest.skip(*requires_builtin_marker.args)
|
||||
|
||||
|
||||
def _sequential_executor(*args, **kwargs):
|
||||
return spack.util.parallel.SequentialExecutor()
|
||||
@@ -2100,6 +2094,35 @@ def mock_modules_root(tmp_path, monkeypatch):
|
||||
monkeypatch.setattr(spack.modules.common, "root_path", fn)
|
||||
|
||||
|
||||
_repo_name_id = 0
|
||||
|
||||
|
||||
def create_test_repo(tmpdir, pkg_name_content_tuples):
|
||||
global _repo_name_id
|
||||
|
||||
repo_path = str(tmpdir)
|
||||
repo_yaml = tmpdir.join("repo.yaml")
|
||||
with open(str(repo_yaml), "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
f"""\
|
||||
repo:
|
||||
namespace: testrepo{str(_repo_name_id)}
|
||||
"""
|
||||
)
|
||||
|
||||
_repo_name_id += 1
|
||||
|
||||
packages_dir = tmpdir.join("packages")
|
||||
for pkg_name, pkg_str in pkg_name_content_tuples:
|
||||
pkg_dir = packages_dir.ensure(pkg_name, dir=True)
|
||||
pkg_file = pkg_dir.join("package.py")
|
||||
with open(str(pkg_file), "w", encoding="utf-8") as f:
|
||||
f.write(pkg_str)
|
||||
|
||||
repo_cache = spack.util.file_cache.FileCache(str(tmpdir.join("cache")))
|
||||
return spack.repo.Repo(repo_path, cache=repo_cache)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def compiler_factory():
|
||||
"""Factory for a compiler dict, taking a spec and an OS as arguments."""
|
||||
|
||||
@@ -81,7 +81,7 @@ packages:
|
||||
fortran: /path/bin/gfortran-10
|
||||
llvm:
|
||||
externals:
|
||||
- spec: "llvm@15.0.0 +clang~flang os={linux_os.name}{linux_os.version} target={target}"
|
||||
- spec: "llvm@15.0.0 +clang os={linux_os.name}{linux_os.version} target={target}"
|
||||
prefix: /path
|
||||
extra_attributes:
|
||||
compilers:
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(PackageBase):
|
||||
class DiffTest(AutotoolsPackage):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(PackageBase):
|
||||
class DiffTest(AutotoolsPackage):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
||||
@@ -2,11 +2,10 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(PackageBase):
|
||||
class DiffTest(AutotoolsPackage):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -70,7 +70,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -114,7 +114,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -137,7 +137,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -160,7 +160,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -183,7 +183,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -215,7 +215,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -253,7 +253,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -276,7 +276,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -314,7 +314,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -337,7 +337,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -368,7 +368,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -133,7 +133,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -209,7 +209,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -264,7 +264,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -319,7 +319,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -374,7 +374,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -438,7 +438,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -508,7 +508,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -563,7 +563,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -633,7 +633,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -688,7 +688,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -751,7 +751,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -806,7 +806,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -882,7 +882,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
||||
@@ -58,7 +58,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -136,7 +136,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -214,7 +214,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -268,7 +268,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -322,7 +322,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -376,7 +376,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -440,7 +440,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -511,7 +511,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -565,7 +565,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -636,7 +636,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -690,7 +690,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -753,7 +753,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -807,7 +807,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -885,7 +885,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin_mock",
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
||||
@@ -18,7 +18,7 @@ spec:
|
||||
compiler:
|
||||
name: gcc
|
||||
version: 4.5.0
|
||||
namespace: builtin_mock
|
||||
namespace: builtin.mock
|
||||
parameters:
|
||||
optimize: true
|
||||
pic: true
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGf23+EBEAC6UqaiE43cF9jFuVjA8xJ5j31BMhufpnk0cwoE5Iks/GgR/Hki
|
||||
LMYbzy36V7TZGObel+5DtFKipX+WCwWj2XsjbeqHeuCkxZhzHFwfi1UJl9FO2T28
|
||||
iNn6OsBiGeU6ULNmehSia2hx0uhj1re/FUwJExOAvuYv8nc7M+nozqi7Pp/WjP8v
|
||||
UTiqP2onzZJbidlSBvmZ2nheWk7G78e617gcV/ye+UyXZvciiF2UQBg9YV6D8JuD
|
||||
YhBbNAVOzJOiyOdTBmZmOkmYsGx58sEbFVqGeOMB0xoxZrqKjMm9NhvjqjJF/sWs
|
||||
hN/PD5ylW1UR05/fGxlG2GLKKfBInbdqnC101OFWXP5HenYHmKaBJoCKCAUfsoJ0
|
||||
r/t/GVh3z3w/99p0TRDONnTecKm5S9z3/5QjjE5RsWcd4ll7mRikUiVpe1WhKRwT
|
||||
4T76pQLq3XwNJqiOmuMQuSHoBE9OMufvRFiTYC0QHyLoCV2H5PCWtS2xSsIDN4PB
|
||||
0RNd0hnHKanVV7d2TkIrGOagoAo0wXqyW/Op6KUG1NdaFYYziDFEHeZxfGoPKytO
|
||||
iS5PEwZG2FqambAZhJU5OXwzgnCRIoE5DCZad4YS6U5YD/2zg+RrQ/5GUxl5Cc+W
|
||||
Zwesn9FV5jywx/oFePYbTSNQVPQ6jbUDvhmHvZ8c/OfGOVXQr0VpvfIwdwARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJRBBMBCAA7FiEEqYoEuILhnYX9Nu4GlWXYCwVckv8F
|
||||
Amf23+ECGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQlWXYCwVckv9i
|
||||
pg//eGjBR9ph9hUYRsekzKWM1xB5zFOFfNoqlpCut/W7LAfy0XXkFy/y6EvPdcgn
|
||||
lLWRWPsOFfsKGwZd7LgSovhEMQ2MRsAUUB/KNZx7s6vO/P773PmJspF3odQ/lcrM
|
||||
1fum2lShChWqimdBdNLrXxG+8duO9uWaMBIp28diBCyB25M/MqpHtKYu00FB/QJ6
|
||||
ZwQH4OsgXVQHRjyrtIGx/2FQoWt0ah3eJMJCEw46GgkgiojtoTfXQQc4fIJP324b
|
||||
O1sxz5lx3xVBG/EZYzyV3xnSoG9aZNJ1cJq8EKO7ZoNKc/8jwkVu5gewGaXYI0LK
|
||||
/WkOeiXcSHPMSdu7TpnitvLYFCjc9YAEKQnjooXdt7+BElwC3+5hZJNXEnoGPMzn
|
||||
3UL60sQE/ViCsGcW+l9rtzXPNTmLMjEg4rGRqOhX+UmwyhvGD2QYbZtXlayu5xn+
|
||||
5m/PfmdqgL1xsdvNsLo/BOo+6kizMdBk48Xfp0YM8AC4BzUEENypGzC4T0WYF0k1
|
||||
Jfc6/eSwiytIcIkJ42GlaVfEFE8UxfYc1/2zqTBN9EdzWJqy0Bh+mVOgOaeb0Dzi
|
||||
xWpUpChi1fBB3PXWJ5iAS/w0HSVn4G5/JAIEFAs7r6ju2YtKBfuk+u/K5Q28mo7W
|
||||
6LrZQywN44nBMTvSQUhhXpSNYG+juyotXJUJ3F2u9Cf/jVU=
|
||||
=TkbL
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
@@ -1,29 +0,0 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGgnhhYBEAC5LOSkJlxL4rRDBLDatswpzAw7NQnONW37hwOauEf6rlw/wk6J
|
||||
2D1l/jjmGwyo1iHOEu1/26fMuXMmG0vAxOQJFrkoKAgxDUD9nL0GqTJyg0+yTCN6
|
||||
xsWsrIZi+8oNDXYzLiejICZorc+ri11kcZdA+WE2hWPRStmJH75afpSd7XfNijqb
|
||||
MPfDZBcr+pLeARSH11BTfb8Dtm9qN//+X+pNIUqeHL9hLu/W9hb3GCfXqnsCQJA1
|
||||
WMFTrbCcPYm0R7EevMnscFvS8xbhocBPDwZ12f4W5CugrL29X4Vx9SaUlIyy/+SC
|
||||
2Gwi8Yq78Y4dTN7N5aA8L169/uqy4Tx7/966wMkUYXk7UxmH9E0ol5EZYnY9SCj6
|
||||
xLtMNKA+NLwESj0azaWEzxfztyNdTYfG8Eaa/QGFs1YVGhYdmcEp8KDbQg5FBeCA
|
||||
I6MUcH0XWOTJaZI/oEtukMYHzBt9jyyq6Gp45TiQvOou0wE+w/zJcd9Td23R81KW
|
||||
GfMh5r80NET/bx88vee4NNHkWCphhqs53rIrhWV3y3WKaWp7DfP3WMiTBJ+Yc+PI
|
||||
0vMIHKYNy+OqwTjmwgKdN1w1xZhLG7hx0sAdcZGP7q0A6381HtucgS/fucDogMnW
|
||||
H3anE8UGx4HBRjyXsuOaOAgNw2K4IwancUSf67WSzji3AiP46sUun5ERNQARAQAB
|
||||
tBlTcGFjayA8c3BhY2tAc3BhY2suc3BhY2s+iQJXBBMBCgBBFiEEy6ssEDLG/1B4
|
||||
BJ7A+mHVDBLK034FAmgnhhYCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwEC
|
||||
HgcCF4AACgkQ+mHVDBLK034zWhAAtjm802qaTSCvB9WvY1RM65/B1GUK3ZEv3fw/
|
||||
Dvt3xd3mh+rzWBTJ8t7+/cPaOq7qOGnfUateHgou+0T6lgCLkrwr4lFa6yZSUATb
|
||||
xcnopcA0Dal218UcIRb20PjPtoKu3Tt9JFceXJGCTYoGz5HbkOemwkR8B+4qMRPW
|
||||
sn1IhV32eig2HUzrUXVOv6WomMtk2qUpND0WnTlZo3EoInJeTzdlXkOR3lRLADM9
|
||||
yPM6Rp8AV/ykM9DztL4SinzyZjqEM7o1H7EFITZSlkjcBPvqDlvowZGN8TVbG9TQ
|
||||
8Nfz8BYF3SVaPduwXwhbE9D8jqtNt652IZ1+1KbMii1l4deu0UYx8BSfJjNANTTU
|
||||
jFDiyNaGnn5OsZXNllsyAHWky6ApyBD9qFxxNr0kiWbVrrN6s2u4ghm5Hgtdx40v
|
||||
hA9+kvB2mtV/HklUkwDTJ6Ytgp5veh8GKvBD9eAWIitl6w153Rba5LkZbk2ijK6k
|
||||
oyN9Ge/YloSMwXpIEnE7/SRE1o5vye294BZjyqnr+U+wzbEYbC7eXJ0peDCbpbZc
|
||||
0kxMDDbrhmHeEaHeWF30hm6WBaUT4SUcPj5BiV3mt3BhtRgAwA3SvuSenk2yRzR8
|
||||
tBES4b/RBmOczfs4w4m5rAmfVNkNwykry4M2jPCJhVA2qG8q1gLxf+AvaPcAvQ8D
|
||||
kmDeNLI=
|
||||
=CYuA
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
@@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGfHlp4BEAC5wkZSHqF9z6GcymuHpk1m9aNXCJdt4ZWvE8ck8GcuVu1nbzlZ
|
||||
h959jqtwk7nFMki5YaNMz6jcQf0eeS75viL4CoPAqFiVyhyCCh5am75h9F7vTBq6
|
||||
190017lhu9IgkAkiklnjfDbyXH+BwqJ78nXp6e6R4ShFMHNGGvYLem1wmPKzqPlZ
|
||||
zN0yjc0+d5pw4hu+IEFrM63yqGp2BVX1X132IKUEcROCQt1QOma5oORhYEtSCieX
|
||||
PuhuHJOA7q6nJuFccPCs5OcDS4IbQgGAbWL4L1+LAGVLVGpK4IVtqEZ831Srclh8
|
||||
0ruyFFeV/hqOONThwwile0Jwh5Jz/2sYxT5c+nlumXWK+CXTm4OCfGt1UuGy6c6u
|
||||
Rz84PHfanbKnATp6RUjz4DMREkmA6qBnUFqGLLGaBKBsm42b7kbo7m5aeItuOwLE
|
||||
U7AcnBEqqHLfI7O1zrHKjQCxhEWP/iok0kgEdiJ4tlPhfDjQRG6thlmZnVdt/08V
|
||||
+bvVkbYZyWPzjbG3QHyFew1+uzPHb2UopgpByVKYEWhCgNfcFtE56lEI9c40Ba5o
|
||||
LaZl0VlgfSLP4c+LoFB6gZp1gcVQuPo1JKd1v5WP60f1iHhazL5LEeMYcW6kvujK
|
||||
58Q683gSH5DsVAnxaj1uU4nvtKDh8IF1CNKKXk8RVsltdpv9bGhV8b4qVQARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJOBBMBCgA4FiEE6J1JcfAJex56PrVzcbSEgC54180F
|
||||
AmfHlp4CGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQcbSEgC54180aDg//
|
||||
f7GqIW5LzYqIqkey+IjdkSSfeD47tlWc2ukKYStHu0gTlHhrUp4rHNJ/s8XQ1o6o
|
||||
jwzWfNMYh68wt9sjuM2BEkkh3RUFEjVqqW+k562gS5ibfKTDtJb2Yj0n/CQKWvoi
|
||||
vUUzO88xW0AnZFieP+vD5iI5Zw4H2dY8cH4X1XlWAJufFdH4WBaZjujNwNOcCsnd
|
||||
w2nE050wKTR2wroWq0HKn1Ni3QNtKWPpLoHGAlhW6ACLa+EFqxHU6D3KhW6IV4Jc
|
||||
sdt36nHNiRiy6nT99asqtN6Z0Yw+EnQSuIDosIbmSgZoieINh0gU6AKwgydxLUxL
|
||||
Cu1w2fZHGuFR/ym0c/tTpM893DxHMc/EZ/SpU8fXkC9lYnQO3or/Y0mLHd0kSEv7
|
||||
XoonvcOu1tOQzmvrvUQUtTn4+6OKpGViyZG5C8Lbk8/yKWFv5b+Gpss/EiGTHSsk
|
||||
bPTHf5jMsWElv0GgFq2TpybtIcY52yJoZ1fBMEA9Nk76Y/MNFlN0d7HyS6tWGr6E
|
||||
8FWJB7RYG5XHMEDIKSheq+Q5cORwz92JPFI+sovZukp+20G7f7/gwos441KamJPc
|
||||
y1+M4uO21aKX2fA07bcgFtm25gNLoHyvjQLcmyDis6xogvciCV3iQ/mtunewgYp/
|
||||
lUX1dv0R5o8TteaAIkbJicbdLtur/iuAWN404E/QShc=
|
||||
=8P00
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
@@ -1 +1 @@
|
||||
{"keys":{"CBAB2C1032C6FF5078049EC0FA61D50C12CAD37E":{}}}
|
||||
{"keys":{"A98A04B882E19D85FD36EE069565D80B055C92FF":{},"E89D4971F0097B1E7A3EB57371B484802E78D7CD":{}}}
|
||||
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user